def load_data_product(self):
    dset_i = 0
    dataset_management = DatasetManagementServiceClient()
    pubsub_management = PubsubManagementServiceClient()
    data_product_management = DataProductManagementServiceClient()
    resource_registry = self.container.instance.resource_registry

    dp_obj = DataProduct(
        name='instrument_data_product_%i' % dset_i,
        description='ctd stream test',
        processing_level_code='Parsed_Canonical')

    pdict_id = dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    stream_def_id = pubsub_management.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(pubsub_management.delete_stream_definition, stream_def_id)

    data_product_id = data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
    self.addCleanup(data_product_management.delete_data_product, data_product_id)

    data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(data_product_management.suspend_data_product_persistence, data_product_id)

    stream_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasStream', id_only=True)
    stream_id = stream_ids[0]
    route = pubsub_management.read_stream_route(stream_id)

    dataset_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasDataset', id_only=True)
    dataset_id = dataset_ids[0]

    return data_product_id, stream_id, route, stream_def_id, dataset_id
def load_data_product(self):
    dset_i = 0
    dataset_management = DatasetManagementServiceClient()
    pubsub_management = PubsubManagementServiceClient()
    data_product_management = DataProductManagementServiceClient()
    resource_registry = self.container.instance.resource_registry

    tdom, sdom = time_series_domain()
    tdom = tdom.dump()
    sdom = sdom.dump()

    dp_obj = DataProduct(
        name='instrument_data_product_%i' % dset_i,
        description='ctd stream test',
        processing_level_code='Parsed_Canonical',
        temporal_domain=tdom,
        spatial_domain=sdom)

    pdict_id = dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    stream_def_id = pubsub_management.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(pubsub_management.delete_stream_definition, stream_def_id)

    data_product_id = data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
    self.addCleanup(data_product_management.delete_data_product, data_product_id)

    data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(data_product_management.suspend_data_product_persistence, data_product_id)

    stream_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasStream', id_only=True)
    stream_id = stream_ids[0]
    route = pubsub_management.read_stream_route(stream_id)

    dataset_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasDataset', id_only=True)
    dataset_id = dataset_ids[0]

    return data_product_id, stream_id, route, stream_def_id, dataset_id
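# Illustrative sketch (not part of the original suite): how a test body might
# exercise load_data_product() end to end -- publish one granule onto the
# returned stream, then wait for it to land in the dataset. DatasetMonitor,
# ParameterHelper and the self.dataset_management client all appear elsewhere
# in this suite; their availability on the enclosing test class is assumed.
def test_load_data_product_round_trip_sketch(self):
    data_product_id, stream_id, route, stream_def_id, dataset_id = self.load_data_product()
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt, 10)  # ten synthetic records
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    # The monitor's event fires once the persisted dataset receives the granule
    self.assertTrue(monitor.event.wait(10))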
class DatasetLoadTest(IonIntegrationTestCase):
    """
    The following integration tests (INTMAN) are to ONLY be run manually
    """
    def setUp(self):
        self.username = CFG.get_safe('eoi.geoserver.user_name', 'admin')
        self.PASSWORD = CFG.get_safe('eoi.geoserver.password', 'geoserver')
        # The default server URL is redacted in the source; the remainder of
        # setUp (which wires the service clients used below) was lost with it.
        self.gs_host = CFG.get_safe('eoi.geoserver.server', 'http://*****:*****')

    @unittest.skipIf(not CFG.get_safe('eoi.meta.use_eoi_services', False),
                     'Skip test in TABLE LOADER as services are not loaded')
    def test_create_dataset(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        dp = DataProduct(name='example')

        data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt, 100)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.event.wait(10))

        # Yield to other greenlets; had an issue with connectivity
        gevent.sleep(1)

        log.debug("--------------------------------")
        log.debug(dataset_id)
        coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
        log.debug(coverage_path)
        log.debug("--------------------------------")

        breakpoint(locals(), globals())
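# Illustrative sketch (not in the original suite): setUp above only wires the
# GeoServer credentials, so a manual INTMAN run might want to probe the
# endpoint before loading data. The use of `requests` here is an assumption
# about the test environment, not something this repo is known to depend on.
def _geoserver_is_up_sketch(self):
    import requests  # assumed available in the manual-test environment
    try:
        resp = requests.get(self.gs_host, auth=(self.username, self.PASSWORD), timeout=10)
        return resp.status_code == 200
    except requests.RequestException:
        return False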
class DMTestCase(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.resource_registry = self.container.resource_registry
        self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)
        self.data_acquisition_management = DataAcquisitionManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.instrument_management = InstrumentManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.data_process_management = DataProcessManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.user_notification = UserNotificationServiceClient()
        self.workflow_management = WorkflowManagementServiceClient()
        self.visualization = VisualizationServiceClient()

    def create_stream_definition(self, *args, **kwargs):
        stream_def_id = self.pubsub_management.create_stream_definition(*args, **kwargs)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        return stream_def_id

    def create_data_product(self, name, stream_def_id='', param_dict_name='', pdict_id=''):
        if not (stream_def_id or param_dict_name or pdict_id):
            raise AssertionError('Attempted to create a Data Product without a parameter dictionary')

        tdom, sdom = time_series_domain()
        dp = DataProduct(name=name,
                         spatial_domain=sdom.dump(),
                         temporal_domain=tdom.dump())

        stream_def_id = stream_def_id or self.create_stream_definition(
            '%s stream def' % name,
            parameter_dictionary_id=pdict_id or self.RR2.find_resource_by_name(
                RT.ParameterDictionary, param_dict_name, id_only=True))

        data_product_id = self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        return data_product_id

    def activate_data_product(self, data_product_id):
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    def data_product_by_id(self, alt_id):
        data_products, _ = self.container.resource_registry.find_resources_ext(alt_id=alt_id, alt_id_ns='PRE', id_only=True)
        if data_products:
            return data_products[0]
        return None

    def dataset_of_data_product(self, data_product_id):
        return self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
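# Illustrative sketch (not part of the original file): a minimal test built on
# the DMTestCase helpers above. 'ctd_parsed_param_dict' is the parameter
# dictionary name used throughout this suite; the test class and flow here are
# assumptions showing the intended call pattern, not an existing test.
class ExampleDMTest(DMTestCase):
    def test_create_and_persist_sketch(self):
        # Helper resolves the parameter dictionary and registers cleanups itself
        data_product_id = self.create_data_product('example', param_dict_name='ctd_parsed_param_dict')
        self.activate_data_product(data_product_id)
        dataset_id = self.dataset_of_data_product(data_product_id)
        self.assertTrue(dataset_id)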
class TestActivateInstrumentIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2deploy.yml")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)

        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

    def create_logger(self, name, stream_id=""):
        # logger process
        producer_definition = ProcessDefinition(name=name + "_logger")
        producer_definition.executable = {
            "module": "ion.processes.data.stream_granule_logger",
            "class": "StreamGranuleLogger",
        }
        logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
        configuration = {"process": {"stream_id": stream_id}}
        pid = self.processdispatchclient.schedule_process(
            process_definition_id=logger_procdef_id, configuration=configuration
        )
        return pid

    @unittest.skip("TBD")
    def test_activateInstrumentSample(self):
        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(
            RT.InstrumentModel, name="SBE37IMModel", description="SBE37IMModel", model="SBE37IMModel"
        )
        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" % ex)
        log.debug("new InstrumentModel id = %s ", instModel_id)

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name="agent007",
            description="SBE37IMAgent",
            driver_module="ion.agents.instrument.instrument_agent",
            driver_class="InstrumentAgent",
        )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" % ex)
        log.debug("new InstrumentAgent id = %s", instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            "test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) "
        )
        instDevice_obj = IonObject(
            RT.InstrumentDevice, name="SBE37IMDevice", description="SBE37IMDevice", serial_number="12345"
        )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)
        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id
        )

        instAgentInstance_obj = IonObject(
            RT.InstrumentAgentInstance,
            name="SBE37IMAgentInstance",
            description="SBE37IMAgentInstance",
            driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver",
            driver_class="SBE37Driver",
            comms_device_address="sbe37-simulator.oceanobservatories.org",
            comms_device_port=4001,
            port_agent_work_dir="/tmp/",
            port_agent_delimeter=["<<", ">>"],
        )
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id
        )

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def)
        log.debug("new Stream Definition id = %s", instDevice_id)

        log.debug("Creating new CDM data product with a stream definition")

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(
            RT.DataProduct,
            name="the parsed data",
            description="ctd stream test",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )

        data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
        log.debug("new dp_id = %s", data_product_id1)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("Data product streams1 = %s", stream_ids)

        pid = self.create_logger("ctd_parsed", stream_ids[0])
        self.loggerpids.append(pid)

        # -------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        # -------------------------------
        log.debug("test_activateInstrumentSample: create data process definition ctd_L0_all")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name="ctd_L0_all",
            description="transform ctd package into three separate L0 streams",
            module="ion.processes.data.transforms.ctd.ctd_L0_all",
            class_name="ctd_L0_all",
            process_source="some_source_reference",
        )
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" % ex)

        # -------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        # -------------------------------
        outgoing_stream_l0_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_l0_conductivity_id = self.pubsubcli.create_stream_definition(
            container=outgoing_stream_l0_conductivity, name="L0_Conductivity"
        )
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id
        )

        outgoing_stream_l0_pressure = L0_pressure_stream_definition()
        outgoing_stream_l0_pressure_id = self.pubsubcli.create_stream_definition(
            container=outgoing_stream_l0_pressure, name="L0_Pressure"
        )
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id
        )

        outgoing_stream_l0_temperature = L0_temperature_stream_definition()
        outgoing_stream_l0_temperature_id = self.pubsubcli.create_stream_definition(
            container=outgoing_stream_l0_temperature, name="L0_Temperature"
        )
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id
        )

        self.output_products = {}

        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 conductivity")
        ctd_l0_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name="L0_Conductivity",
            description="transform output conductivity",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )
        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id, parameter_dictionary
        )
        self.output_products["conductivity"] = ctd_l0_conductivity_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id)

        stream_ids, _ = self.rrclient.find_objects(ctd_l0_conductivity_output_dp_id, PRED.hasStream, None, True)
        log.debug("ctd_l0_conductivity stream id = %s", str(stream_ids))
        pid = self.create_logger("ctd_l0_conductivity", stream_ids[0])
        self.loggerpids.append(pid)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 pressure")
        ctd_l0_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name="L0_Pressure",
            description="transform output pressure",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )
        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id, parameter_dictionary
        )
        self.output_products["pressure"] = ctd_l0_pressure_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id)

        stream_ids, _ = self.rrclient.find_objects(ctd_l0_pressure_output_dp_id, PRED.hasStream, None, True)
        log.debug("ctd_l0_pressure stream id = %s", str(stream_ids))
        pid = self.create_logger("ctd_l0_pressure", stream_ids[0])
        self.loggerpids.append(pid)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 temperature")
        ctd_l0_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name="L0_Temperature",
            description="transform output temperature",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )
        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id, parameter_dictionary
        )
        self.output_products["temperature"] = ctd_l0_temperature_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id)

        stream_ids, _ = self.rrclient.find_objects(ctd_l0_temperature_output_dp_id, PRED.hasStream, None, True)
        log.debug("ctd_l0_temperature stream id = %s", str(stream_ids))
        pid = self.create_logger("ctd_l0_temperature", stream_ids[0])
        self.loggerpids.append(pid)

        # -------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        # -------------------------------
        log.debug("test_activateInstrumentSample: create L0 all data_process start")
        try:
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L0_all_dprocdef_id, [data_product_id1], self.output_products
            )
            self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)
        log.debug("test_createTransformsThenActivateInstrument: create L0 all data_process return")

        log.debug("Creating new RAW data product with a stream definition")
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubcli.create_stream_definition(container=raw_stream_def)

        dp_obj = IonObject(
            RT.DataProduct,
            name="the raw data",
            description="raw stream test",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )

        data_product_id2 = self.dpclient.create_data_product(dp_obj, raw_stream_def_id, parameter_dictionary)
        log.debug("new dp_id = %s", str(data_product_id2))

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True)
        log.debug("Data product streams2 = %s", str(stream_ids))

        self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        log.debug("Instrument agent instance obj: = %s", str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        # self._ia_client = ResourceAgentClient('123xyz', name=inst_agent_instance_obj.agent_process_id, process=FakeProcess())
        self._ia_client = ResourceAgentClient(instDevice_id, process=FakeProcess())
        log.debug("test_activateInstrumentSample: got ia client %s", str(self._ia_client))

        cmd = AgentCommand(command="initialize")
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: initialize %s", str(retval))
        time.sleep(1)

        log.debug("test_activateInstrumentSample: Sending go_active command (L4-CI-SA-RQ-334)")
        cmd = AgentCommand(command="go_active")
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrument: return value from go_active %s", str(reply))
        time.sleep(1)

        cmd = AgentCommand(command="get_current_state")
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug(
            "test_activateInstrumentSample: current state after sending go_active command %s (L4-CI-SA-RQ-334)",
            str(state),
        )

        cmd = AgentCommand(command="run")
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: run %s", str(reply))
        time.sleep(1)

        log.debug("test_activateInstrumentSample: calling acquire_sample ")
        cmd = AgentCommand(command="acquire_sample")
        reply = self._ia_client.execute(cmd)
        log.debug("test_activateInstrumentSample: return from acquire_sample %s", str(reply))
        time.sleep(1)

        log.debug("test_activateInstrumentSample: calling acquire_sample 2")
        cmd = AgentCommand(command="acquire_sample")
        reply = self._ia_client.execute(cmd)
        log.debug("test_activateInstrumentSample: return from acquire_sample 2 %s", str(reply))
        time.sleep(1)

        log.debug("test_activateInstrumentSample: calling acquire_sample 3")
        cmd = AgentCommand(command="acquire_sample")
        reply = self._ia_client.execute(cmd)
        log.debug("test_activateInstrumentSample: return from acquire_sample 3 %s", str(reply))
        time.sleep(2)

        log.debug("test_activateInstrumentSample: calling reset ")
        cmd = AgentCommand(command="reset")
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset %s", str(reply))
        time.sleep(1)

        # -------------------------------
        # Deactivate InstrumentAgentInstance
        # -------------------------------
        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

    @unittest.skip("TBD")
    def test_activateInstrumentStream(self):
        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(
            RT.InstrumentModel, name="SBE37IMModel", description="SBE37IMModel", model="SBE37IMModel"
        )
        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" % ex)
        log.debug("new InstrumentModel id = %s ", instModel_id)

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name="agent007",
            description="SBE37IMAgent",
            driver_module="ion.agents.instrument.instrument_agent",
            driver_class="InstrumentAgent",
        )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" % ex)
        log.debug("new InstrumentAgent id = %s", instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            "test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) "
        )
        instDevice_obj = IonObject(
            RT.InstrumentDevice, name="SBE37IMDevice", description="SBE37IMDevice", serial_number="12345"
        )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)
        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id
        )

        instAgentInstance_obj = IonObject(
            RT.InstrumentAgentInstance,
            name="SBE37IMAgentInstance",
            description="SBE37IMAgentInstance",
            driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver",
            driver_class="SBE37Driver",
            comms_device_address="sbe37-simulator.oceanobservatories.org",
            comms_device_port=4001,
            port_agent_work_dir="/tmp/",
            port_agent_delimeter=["<<", ">>"],
        )
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id
        )

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def)
        log.debug("new Stream Definition id = %s", instDevice_id)

        log.debug("Creating new CDM data product with a stream definition")

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(
            RT.DataProduct,
            name="the parsed data",
            description="ctd stream test",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )

        data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
        log.debug("new dp_id = %s", data_product_id1)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("Data product streams1 = %s", stream_ids)

        pid = self.create_logger("ctd_parsed", stream_ids[0])
        self.loggerpids.append(pid)

        # The subscription block below was left disabled in the source; the
        # activate_subscription call is kept commented out with it, since the
        # subscription id it references is never created.
        # simdata_subscription_id = self.pubsubcli.create_subscription(
        #     query=StreamQuery([stream_ids[0]]),
        #     exchange_name='Sim_data_queue',
        #     name='SimDataSubscription',
        #     description='SimData SubscriptionDescription'
        # )
        #
        # def simdata_message_received(message, headers):
        #     input = str(message)
        #     log.debug("test_activateInstrumentStream: granule received: %s", input)
        #
        # subscriber_registrar = StreamSubscriberRegistrar(process=self.container, container=self.container)
        # simdata_subscriber = subscriber_registrar.create_subscriber(exchange_name='Sim_data_queue', callback=simdata_message_received)
        #
        # # Start subscribers
        # simdata_subscriber.start()
        #
        # # Activate subscriptions
        # self.pubsubcli.activate_subscription(simdata_subscription_id)

        log.debug("Creating new RAW data product with a stream definition")
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubcli.create_stream_definition(container=raw_stream_def)

        dp_obj = IonObject(
            RT.DataProduct,
            name="the raw data",
            description="raw stream test",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )

        data_product_id2 = self.dpclient.create_data_product(dp_obj, raw_stream_def_id, parameter_dictionary)
        log.debug("new dp_id = %s", str(data_product_id2))

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True)
        log.debug("Data product streams2 = %s", str(stream_ids))

        self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        log.debug("test_activateInstrumentStream Instrument agent instance obj: = %s", str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        # self._ia_client = ResourceAgentClient('123xyz', name=inst_agent_instance_obj.agent_process_id, process=FakeProcess())
        self._ia_client = ResourceAgentClient(instDevice_id, process=FakeProcess())
        log.debug("test_activateInstrumentStream: got ia client %s", str(self._ia_client))

        cmd = AgentCommand(command="initialize")
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentStream: initialize %s", str(retval))
        time.sleep(2)

        log.debug("test_activateInstrumentStream: Sending go_active command (L4-CI-SA-RQ-334)")
        cmd = AgentCommand(command="go_active")
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentStream: return value from go_active %s", str(reply))
        time.sleep(2)

        cmd = AgentCommand(command="get_current_state")
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug(
            "test_activateInstrumentStream: current state after sending go_active command %s (L4-CI-SA-RQ-334)",
            str(state),
        )

        cmd = AgentCommand(command="run")
        reply = self._ia_client.execute_agent(cmd)
        time.sleep(2)

        cmd = AgentCommand(command="get_current_state")
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_activateInstrumentStream: return from run state: %s", str(state))

        # Make sure the sampling rate and transmission are sane.
        params = {SBE37Parameter.NAVG: 1, SBE37Parameter.INTERVAL: 5, SBE37Parameter.TXREALTIME: True}
        self._ia_client.set_param(params)
        time.sleep(2)

        log.debug("test_activateInstrumentStream: calling go_streaming ")
        cmd = AgentCommand(command="go_streaming")
        reply = self._ia_client.execute_agent(cmd)
        time.sleep(2)

        cmd = AgentCommand(command="get_current_state")
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_activateInstrumentStream: return from go_streaming state: %s", str(state))

        time.sleep(5)

        log.debug("test_activateInstrumentStream: calling go_observatory")
        cmd = AgentCommand(command="go_observatory")
        reply = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_activateInstrumentStream: return from go_observatory state %s", str(state))

        log.debug("test_activateInstrumentStream: calling reset ")
        cmd = AgentCommand(command="reset")
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentStream: return from reset state: %s", str(reply.result))
        time.sleep(2)

        # -------------------------------
        # Deactivate InstrumentAgentInstance
        # -------------------------------
        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)
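# Illustrative sketch (not part of the original tests): both tests above repeat
# the AgentCommand / execute_agent / sleep sequence many times. A method like
# this on the test class could collapse that boilerplate. The helper name and
# the pause default are assumptions; AgentCommand, execute_agent and time.sleep
# are exactly the calls used above.
def _run_agent_command_sketch(self, command, pause=1):
    cmd = AgentCommand(command=command)
    reply = self._ia_client.execute_agent(cmd)
    log.debug("agent command %s returned %s", command, str(reply))
    time.sleep(pause)  # give the agent a moment to settle, as the tests above do
    return reply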
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS

    # Build the test resources for the dataset
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()

    eda = ExternalDatasetAgent()
    eda_id = dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects

    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.name = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.name = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'ruv_test_dataset'
    dset = ExternalDataset(name=ds_name,
                           dataset_description=DatasetDescription(),
                           update_description=UpdateDescription(),
                           contact=ContactInformation())
    dset.dataset_description.parameters['dataset_path'] = 'test_data/RDLi_SEAB_2011_08_24_1600.ruv'
    dset.dataset_description.parameters['temporal_dimension'] = None
    dset.dataset_description.parameters['zonal_dimension'] = None
    dset.dataset_description.parameters['meridional_dimension'] = None
    dset.dataset_description.parameters['vertical_dimension'] = None
    dset.dataset_description.parameters['variables'] = []

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='ruv_model')
    dsrc_model.model = 'RUV'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    # Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Generate the data product and associate it to the ExternalDataset
    dprod = DataProduct(name='ruv_parsed_product', description='parsed ruv product')
    dproduct_id = dpms_cli.create_data_product(data_product=dprod)

    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format(
        {'ExternalDataset': ds_id,
         'ExternalDataProvider': ext_dprov_id,
         'DataSource': ext_dsrc_id,
         'DataSourceModel': ext_dsrc_model_id,
         'DataProducer': dproducer_id,
         'DataProduct': dproduct_id,
         'Stream': stream_id}))

    # CBM: Use CF standard_names
    ttool = TaxyTool()
    ttool.add_taxonomy_set('data', 'test data')

    # CBM: Eventually, probably want to group this somehow - not sure how yet...

    # Create the logger for receiving publications
    self.create_stream_and_logger(name='ruv', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'external_dataset_res': dset,
        'taxonomy': ttool.dump(),
        'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 20,
    }
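# Illustrative sketch (not in the original file): the DAMS association calls in
# _setup_resources() always run in the same order, so a small helper could make
# the wiring explicit and reusable across dataset tests. The function name is
# an assumption; the four assign_* calls are exactly the ones used above.
def wire_external_dataset(dams_cli, ds_id, dsrc_id, dprov_id, dsrc_model_id, eda_inst_id):
    # provider <- source <- model, then dataset <- source and dataset <- agent instance
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=dsrc_id, external_data_provider_id=dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=dsrc_id, data_source_model_id=dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)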
class TestActivateInstrumentIntegration(IonIntegrationTestCase): def setUp(self): # Start container super(TestActivateInstrumentIntegration, self).setUp() config = DotDict() config.bootstrap.use_es = True self._start_container() self.addCleanup(TestActivateInstrumentIntegration.es_cleanup) self.container.start_rel_from_url('res/deploy/r2deploy.yml', config) # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubcli = PubsubManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dpclient = DataProductManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataretrieverclient = DataRetrieverServiceClient( node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.usernotificationclient = UserNotificationServiceClient() #setup listerner vars self._data_greenlets = [] self._no_samples = None self._samples_received = [] self.event_publisher = EventPublisher() @staticmethod def es_cleanup(): es_host = CFG.get_safe('server.elasticsearch.host', 'localhost') es_port = CFG.get_safe('server.elasticsearch.port', '9200') es = ep.ElasticSearch(host=es_host, port=es_port, timeout=10) indexes = STD_INDEXES.keys() indexes.append('%s_resources_index' % get_sys_name().lower()) indexes.append('%s_events_index' % get_sys_name().lower()) for index in indexes: IndexManagementService._es_call(es.river_couchdb_delete, index) IndexManagementService._es_call(es.index_delete, index) def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name + '_logger') producer_definition.executable = { 'module': 'ion.processes.data.stream_granule_logger', 'class': 'StreamGranuleLogger' } logger_procdef_id = self.processdispatchclient.create_process_definition( process_definition=producer_definition) configuration = { 'process': { 'stream_id': stream_id, } } pid = self.processdispatchclient.schedule_process( process_definition_id=logger_procdef_id, configuration=configuration) return pid def _create_notification(self, user_name='', instrument_id='', product_id=''): #-------------------------------------------------------------------------------------- # Make notification request objects #-------------------------------------------------------------------------------------- notification_request_1 = NotificationRequest( name='notification_1', origin=instrument_id, origin_type="instrument", event_type='ResourceLifecycleEvent') notification_request_2 = NotificationRequest( name='notification_2', origin=product_id, origin_type="data product", event_type='DetectionEvent') #-------------------------------------------------------------------------------------- # Create a user and get the user_id #-------------------------------------------------------------------------------------- user = UserInfo() user.name = user_name user.contact.email = '*****@*****.**' % user_name user_id, _ = self.rrclient.create(user) 
#-------------------------------------------------------------------------------------- # Create notification #-------------------------------------------------------------------------------------- self.usernotificationclient.create_notification( notification=notification_request_1, user_id=user_id) self.usernotificationclient.create_notification( notification=notification_request_2, user_id=user_id) log.debug( "test_activateInstrumentSample: create_user_notifications user_id %s", str(user_id)) return user_id def get_datastore(self, dataset_id): dataset = self.datasetclient.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore( datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def _check_computed_attributes_of_extended_instrument( self, expected_instrument_device_id='', extended_instrument=None): # Verify that computed attributes exist for the extended instrument self.assertIsInstance(extended_instrument.computed.firmware_version, ComputedFloatValue) self.assertIsInstance( extended_instrument.computed.last_data_received_datetime, ComputedFloatValue) self.assertIsInstance( extended_instrument.computed.last_calibration_datetime, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) self.assertIsInstance( extended_instrument.computed.power_status_roll_up, ComputedIntValue) self.assertIsInstance( extended_instrument.computed.communications_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue) self.assertIsInstance( extended_instrument.computed.location_status_roll_up, ComputedIntValue) # the following assert will not work without elasticsearch. #self.assertEqual( 1, len(extended_instrument.computed.user_notification_requests.value) ) self.assertEqual( extended_instrument.computed.communications_status_roll_up.value, StatusType.STATUS_WARNING) self.assertEqual( extended_instrument.computed.data_status_roll_up.value, StatusType.STATUS_OK) self.assertEqual( extended_instrument.computed.power_status_roll_up.value, StatusType.STATUS_WARNING) # Verify the computed attribute for user notification requests self.assertEqual( 1, len(extended_instrument.computed.user_notification_requests.value)) notifications = extended_instrument.computed.user_notification_requests.value notification = notifications[0] self.assertEqual(notification.origin, expected_instrument_device_id) self.assertEqual(notification.origin_type, "instrument") self.assertEqual(notification.event_type, 'ResourceLifecycleEvent') def _check_computed_attributes_of_extended_product( self, expected_data_product_id='', extended_data_product=None): self.assertEqual(expected_data_product_id, extended_data_product._id) log.debug("extended_data_product.computed: %s", extended_data_product.computed) # Verify that computed attributes exist for the extended instrument self.assertIsInstance( extended_data_product.computed.product_download_size_estimated, ComputedIntValue) self.assertIsInstance( extended_data_product.computed.number_active_subscriptions, ComputedIntValue) self.assertIsInstance(extended_data_product.computed.data_url, ComputedStringValue) self.assertIsInstance(extended_data_product.computed.stored_data_size, ComputedIntValue) self.assertIsInstance(extended_data_product.computed.recent_granules, ComputedDictValue) self.assertIsInstance(extended_data_product.computed.parameters, ComputedListValue) 
self.assertIsInstance(extended_data_product.computed.recent_events, ComputedEventListValue) self.assertIsInstance(extended_data_product.computed.provenance, ComputedDictValue) self.assertIsInstance( extended_data_product.computed.user_notification_requests, ComputedListValue) self.assertIsInstance( extended_data_product.computed.active_user_subscriptions, ComputedListValue) self.assertIsInstance( extended_data_product.computed.past_user_subscriptions, ComputedListValue) self.assertIsInstance(extended_data_product.computed.last_granule, ComputedDictValue) self.assertIsInstance(extended_data_product.computed.is_persisted, ComputedIntValue) self.assertIsInstance( extended_data_product.computed.data_contents_updated, ComputedStringValue) self.assertIsInstance(extended_data_product.computed.data_datetime, ComputedListValue) # exact text here keeps changing to fit UI capabilities. keep assertion general... self.assertTrue('ok' in extended_data_product.computed.last_granule. value['quality_flag']) self.assertEqual( 2, len(extended_data_product.computed.data_datetime.value)) notifications = extended_data_product.computed.user_notification_requests.value notification = notifications[0] self.assertEqual(notification.origin, expected_data_product_id) self.assertEqual(notification.origin_type, "data product") self.assertEqual(notification.event_type, 'DetectionEvent') @attr('LOCOINT') @unittest.skipIf(not use_es, 'No ElasticSearch') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') @patch.dict(CFG, {'endpoint': {'receive': {'timeout': 60}}}) def test_activateInstrumentSample(self): self.loggerpids = [] # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") instModel_id = self.imsclient.create_instrument_model(instModel_obj) log.debug('new InstrumentModel id = %s ', instModel_id) #Create stream alarms """ test_two_sided_interval Test interval alarm and alarm event publishing for a closed inteval. """ # kwargs = { # 'name' : 'test_sim_warning', # 'stream_name' : 'parsed', # 'value_id' : 'temp', # 'message' : 'Temperature is above test range of 5.0.', # 'type' : StreamAlarmType.WARNING, # 'upper_bound' : 5.0, # 'upper_rel_op' : '<' # } kwargs = { 'name': 'temperature_warning_interval', 'stream_name': 'parsed', 'value_id': 'temp', 'message': 'Temperature is below the normal range of 50.0 and above.', 'type': StreamAlarmType.WARNING, 'lower_bound': 50.0, 'lower_rel_op': '<' } # Create alarm object. 
alarm = {} alarm['type'] = 'IntervalAlarmDef' alarm['kwargs'] = kwargs raw_config = StreamConfiguration( stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5) parsed_config = StreamConfiguration( stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict', records_per_granule=2, granule_publish_rate=5, alarms=[alarm]) # Create InstrumentAgent instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri= "http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1a-py2.7.egg", stream_configurations=[raw_config, parsed_config]) instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) log.debug('new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent( instModel_id, instAgent_id) # Create InstrumentDevice log.debug( 'test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ' ) instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345") instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) log.debug( "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config=port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, instDevice_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubcli.create_stream_definition( name='parsed', parameter_dictionary_id=parsed_pdict_id) raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_raw_param_dict', id_only=True) raw_stream_def_id = self.pubsubcli.create_stream_definition( name='raw', parameter_dictionary_id=raw_pdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain=tdom, spatial_domain=sdom) data_product_id1 = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug('new dp_id = %s', data_product_id1) self.dpclient.activate_data_product_persistence( data_product_id=data_product_id1) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True) log.debug('Data product streams1 = %s', stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id1, 
PRED.hasDataset, RT.Dataset, True) log.debug('Data set for data_product_id1 = %s', dataset_ids[0]) self.parsed_dataset = dataset_ids[0] pid = self.create_logger('ctd_parsed', stream_ids[0]) self.loggerpids.append(pid) dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test', temporal_domain=tdom, spatial_domain=sdom) data_product_id2 = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=raw_stream_def_id) log.debug('new dp_id = %s', data_product_id2) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2) self.dpclient.activate_data_product_persistence( data_product_id=data_product_id2) # setup notifications for the device and parsed data product user_id_1 = self._create_notification(user_name='user_1', instrument_id=instDevice_id, product_id=data_product_id1) #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users --------- user_id_2 = self._create_notification(user_name='user_2', instrument_id=instDevice_id, product_id=data_product_id2) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True) log.debug('Data product streams2 = %s', str(stream_ids)) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasDataset, RT.Dataset, True) log.debug('Data set for data_product_id2 = %s', dataset_ids[0]) self.raw_dataset = dataset_ids[0] #elastic search debug es_indexes, _ = self.container.resource_registry.find_resources( restype='ElasticSearchIndex') log.debug('ElasticSearch indexes: %s', [i.name for i in es_indexes]) log.debug('Bootstrap %s', CFG.bootstrap.use_es) def start_instrument_agent(): self.imsclient.start_instrument_agent_instance( instrument_agent_instance_id=instAgentInstance_id) gevent.joinall([gevent.spawn(start_instrument_agent)]) #setup a subscriber to alarm events from the device self._events_received = [] self._event_count = 0 self._samples_out_of_range = 0 self._samples_complete = False self._async_sample_result = AsyncResult() def consume_event(*args, **kwargs): log.debug( 'TestActivateInstrument recieved ION event: args=%s, kwargs=%s, event=%s.', str(args), str(kwargs), str(args[0])) self._events_received.append(args[0]) self._event_count = len(self._events_received) self._async_sample_result.set() self._event_subscriber = EventSubscriber( event_type= 'StreamWarningAlarmEvent', #'StreamWarningAlarmEvent', # StreamAlarmEvent callback=consume_event, origin=instDevice_id) self._event_subscriber.start() #cleanup self.addCleanup(self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=instAgentInstance_id) def stop_subscriber(): self._event_subscriber.stop() self._event_subscriber = None self.addCleanup(stop_subscriber) #wait for start inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance( instAgentInstance_id) gate = ProcessStateGate(self.processdispatchclient.read_process, inst_agent_instance_obj.agent_process_id, ProcessStateEnum.RUNNING) self.assertTrue( gate. await (30), "The instrument agent instance (%s) did not spawn in 30 seconds" % inst_agent_instance_obj.agent_process_id) log.debug('Instrument agent instance obj: = %s', str(inst_agent_instance_obj)) # Start a resource agent client to talk with the instrument agent. 
self._ia_client = ResourceAgentClient( instDevice_id, to_name=inst_agent_instance_obj.agent_process_id, process=FakeProcess()) log.debug("test_activateInstrumentSample: got ia client %s", str(self._ia_client)) cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) retval = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: initialize %s", str(retval)) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.INACTIVE) log.debug("(L4-CI-SA-RQ-334): Sending go_active command ") cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrument: return value from go_active %s", str(reply)) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.IDLE) cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE) retval = self._ia_client.execute_agent(cmd) state = retval.result log.debug( "(L4-CI-SA-RQ-334): current state after sending go_active command %s", str(state)) cmd = AgentCommand(command=ResourceAgentEvent.RUN) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: run %s", str(reply)) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.COMMAND) cmd = AgentCommand(command=ResourceAgentEvent.PAUSE) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.STOPPED) cmd = AgentCommand(command=ResourceAgentEvent.RESUME) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.COMMAND) cmd = AgentCommand(command=ResourceAgentEvent.CLEAR) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.IDLE) cmd = AgentCommand(command=ResourceAgentEvent.RUN) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.COMMAND) cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE) for i in xrange(10): retval = self._ia_client.execute_resource(cmd) log.debug("test_activateInstrumentSample: return from sample %s", str(retval)) log.debug("test_activateInstrumentSample: calling reset ") cmd = AgentCommand(command=ResourceAgentEvent.RESET) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: return from reset %s", str(reply)) self._samples_complete = True #-------------------------------------------------------------------------------- # Now get the data in one chunk using an RPC Call to start_retreive #-------------------------------------------------------------------------------- replay_data = self.dataretrieverclient.retrieve(self.parsed_dataset) self.assertIsInstance(replay_data, Granule) rdt = RecordDictionaryTool.load_from_granule(replay_data) log.debug("test_activateInstrumentSample: RDT parsed: %s", str(rdt.pretty_print())) temp_vals = rdt['temp'] self.assertEquals(len(temp_vals), 10) log.debug("test_activateInstrumentSample: all temp_vals: %s", temp_vals) #out_of_range_temp_vals = [i for i in temp_vals if i > 5] out_of_range_temp_vals = [i for i in temp_vals if i < 50.0] log.debug("test_activateInstrumentSample: Out_of_range_temp_vals: %s", out_of_range_temp_vals) self._samples_out_of_range = len(out_of_range_temp_vals) # if no bad values were produced, then do not wait for an event if self._samples_out_of_range == 0: self._async_sample_result.set() 
log.debug("test_activateInstrumentSample: _events_received: %s", self._events_received) log.debug("test_activateInstrumentSample: _event_count: %s", self._event_count) self._async_sample_result.get(timeout=CFG.endpoint.receive.timeout) replay_data = self.dataretrieverclient.retrieve(self.raw_dataset) self.assertIsInstance(replay_data, Granule) rdt = RecordDictionaryTool.load_from_granule(replay_data) log.debug("RDT raw: %s", str(rdt.pretty_print())) raw_vals = rdt['raw'] self.assertEquals(len(raw_vals), 10) log.debug("l4-ci-sa-rq-138") """ Physical resource control shall be subject to policy Instrument management control capabilities shall be subject to policy The actor accessing the control capabilities must be authorized to send commands. note from maurice 2012-05-18: Talk to tim M to verify that this is policy. If it is then talk with Stephen to get an example of a policy test and use that to create a test stub that will be completed when we have instrument policies. Tim M: The "actor", aka observatory operator, will access the instrument through ION. """ #-------------------------------------------------------------------------------- # Get the extended data product to see if it contains the granules #-------------------------------------------------------------------------------- extended_product = self.dpclient.get_data_product_extension( data_product_id=data_product_id1, user_id=user_id_1) def poller(extended_product): return len(extended_product.computed.user_notification_requests. value) == 1 poll(poller, extended_product, timeout=30) self._check_computed_attributes_of_extended_product( expected_data_product_id=data_product_id1, extended_data_product=extended_product) #-------------------------------------------------------------------------------- #put some events into the eventsdb to test - this should set the comms and data status to WARNING #-------------------------------------------------------------------------------- t = get_ion_ts() self.event_publisher.publish_event(ts_created=t, event_type='DeviceStatusEvent', origin=instDevice_id, state=DeviceStatusType.OUT_OF_RANGE, values=[200]) self.event_publisher.publish_event( ts_created=t, event_type='DeviceCommsEvent', origin=instDevice_id, state=DeviceCommsType.DATA_DELIVERY_INTERRUPTION, lapse_interval_seconds=20) #-------------------------------------------------------------------------------- # Get the extended instrument #-------------------------------------------------------------------------------- extended_instrument = self.imsclient.get_instrument_device_extension( instrument_device_id=instDevice_id, user_id=user_id_1) self._check_computed_attributes_of_extended_instrument( expected_instrument_device_id=instDevice_id, extended_instrument=extended_instrument) #-------------------------------------------------------------------------------- # For the second user, check the extended data product and the extended intrument #-------------------------------------------------------------------------------- extended_product = self.dpclient.get_data_product_extension( data_product_id=data_product_id2, user_id=user_id_2) self._check_computed_attributes_of_extended_product( expected_data_product_id=data_product_id2, extended_data_product=extended_product) #---------- Put some events into the eventsdb to test - this should set the comms and data status to WARNING --------- t = get_ion_ts() self.event_publisher.publish_event(ts_created=t, event_type='DeviceStatusEvent', origin=instDevice_id, state=DeviceStatusType.OUT_OF_RANGE, 
values=[200]) self.event_publisher.publish_event( ts_created=t, event_type='DeviceCommsEvent', origin=instDevice_id, state=DeviceCommsType.DATA_DELIVERY_INTERRUPTION, lapse_interval_seconds=20) #-------------------------------------------------------------------------------- # Get the extended instrument #-------------------------------------------------------------------------------- extended_instrument = self.imsclient.get_instrument_device_extension( instrument_device_id=instDevice_id, user_id=user_id_2) self._check_computed_attributes_of_extended_instrument( expected_instrument_device_id=instDevice_id, extended_instrument=extended_instrument) #-------------------------------------------------------------------------------- # Deactivate loggers #-------------------------------------------------------------------------------- for pid in self.loggerpids: self.processdispatchclient.cancel_process(pid) self.dpclient.delete_data_product(data_product_id1) self.dpclient.delete_data_product(data_product_id2)
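# The pair of publish_event calls above is repeated verbatim for each user check. A small
# helper like the sketch below could express that pattern once. This is illustrative only:
# the helper name and its placement are assumptions, not part of the original tests, and it
# reuses only the event fields that appear in the calls above.
def publish_warning_events(event_publisher, device_id):
    """Publish one device-status and one device-comms event that should roll up to WARNING."""
    t = get_ion_ts()
    event_publisher.publish_event(ts_created=t,
                                  event_type='DeviceStatusEvent',
                                  origin=device_id,
                                  state=DeviceStatusType.OUT_OF_RANGE,
                                  values=[200])
    event_publisher.publish_event(ts_created=t,
                                  event_type='DeviceCommsEvent',
                                  origin=device_id,
                                  state=DeviceCommsType.DATA_DELIVERY_INTERRUPTION,
                                  lapse_interval_seconds=20)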
class TestInstrumentAlerts(IonIntegrationTestCase):
    pdict_id = None

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create clients to the services used by the tests
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)

        self.catch_alert = gevent.queue.Queue()

    def _create_instrument_model(self):
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        self.addCleanup(self.imsclient.delete_instrument_model, instModel_id)
        return instModel_id

    def _create_instrument_agent(self, instModel_id):
        raw_config = StreamConfiguration(stream_name='raw',
                                         parameter_dictionary_name='ctd_raw_param_dict',
                                         records_per_granule=2,
                                         granule_publish_rate=5)
        parsed_config = StreamConfiguration(stream_name='parsed',
                                            parameter_dictionary_name='ctd_parsed_param_dict',
                                            records_per_granule=2,
                                            granule_publish_rate=5)

        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.4-py2.7.egg",
                                  stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        self.addCleanup(self.imsclient.delete_instrument_agent, instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)
        return instAgent_id

    def _create_instrument_device(self, instModel_id):
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        self.addCleanup(self.imsclient.delete_instrument_device, instDevice_id)
        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241)", instDevice_id)
        return instDevice_id

    def _create_instrument_stream_alarms(self, instDevice_id):
        # Create stream alarms
        """
        test_two_sided_interval
        Test interval alarm and alarm event publishing for a closed interval.
""" temp_alert_def = { 'name' : 'temperature_warning_interval', 'stream_name' : 'parsed', 'description' : 'Temperature is below the normal range of 50.0 and above.', 'alert_type' : StreamAlertType.WARNING, 'aggregate_type' : AggregateStatusType.AGGREGATE_DATA, 'value_id' : 'temp', 'resource_id' : instDevice_id, 'origin_type' : 'device', 'lower_bound' : 50.0, 'lower_rel_op' : '<', 'alert_class' : 'IntervalAlert' } late_data_alert_def = { 'name' : 'late_data_warning', 'stream_name' : 'parsed', 'description' : 'Expected data has not arrived.', 'alert_type' : StreamAlertType.WARNING, 'aggregate_type' : AggregateStatusType.AGGREGATE_COMMS, 'value_id' : None, 'resource_id' : instDevice_id, 'origin_type' : 'device', 'time_delta' : 2, 'alert_class' : 'LateDataAlert' } return temp_alert_def, late_data_alert_def def _create_instrument_agent_instance(self, instAgent_id, instDevice_id): # port_agent_config = { # 'device_addr': CFG.device.sbe37.host, # 'device_port': CFG.device.sbe37.port, # 'process_type': PortAgentProcessType.UNIX, # 'binary_path': "port_agent", # 'port_agent_addr': 'localhost', # 'command_port': CFG.device.sbe37.port_agent_cmd_port, # 'data_port': CFG.device.sbe37.port_agent_data_port, # 'log_level': 5, # 'type': PortAgentType.ETHERNET # } port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': 4008, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } temp_alert, late_data_alert = self._create_instrument_stream_alarms(instDevice_id) instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config = port_agent_config, alerts= [temp_alert, late_data_alert] ) instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id) self.addCleanup(self.imsclient.delete_instrument_agent_instance, instAgentInstance_id) return instAgentInstance_id def test_alerts(self): #------------------------------------------------------------------------------------- # Create InstrumentModel #------------------------------------------------------------------------------------- instModel_id = self._create_instrument_model() #------------------------------------------------------------------------------------- # Create InstrumentAgent #------------------------------------------------------------------------------------- instAgent_id = self._create_instrument_agent(instModel_id) #------------------------------------------------------------------------------------- # Create InstrumentDevice #------------------------------------------------------------------------------------- instDevice_id = self._create_instrument_device(instModel_id) # It is necessary for the instrument device to be associated with atleast one output data product tdom, sdom = time_series_domain() parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=parsed_pdict_id) raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('raw', id_only=True) raw_stream_def_id = self.pubsubclient.create_stream_definition(name='raw', parameter_dictionary_id=raw_pdict_id) # We are creating two data products here, one for parsed 
and another raw dp_obj_parsed = IonObject(RT.DataProduct, name='parsed_data_product', description='Parsed output data product for instrument', temporal_domain = tdom.dump(), spatial_domain = sdom.dump()) dp_obj_raw = IonObject(RT.DataProduct, name='raw_data_prod', description='Raw output data product for instrument', temporal_domain = tdom.dump(), spatial_domain = sdom.dump()) parsed_out_data_prod_id = self.dataproductclient.create_data_product(data_product=dp_obj_parsed, stream_definition_id=parsed_stream_def_id) raw_out_data_prod_id = self.dataproductclient.create_data_product(data_product=dp_obj_raw, stream_definition_id=raw_stream_def_id) self.addCleanup(self.dataproductclient.delete_data_product, parsed_out_data_prod_id) self.addCleanup(self.dataproductclient.delete_data_product, raw_out_data_prod_id) self.dataproductclient.activate_data_product_persistence(data_product_id=parsed_out_data_prod_id) self.dataproductclient.activate_data_product_persistence(data_product_id=raw_out_data_prod_id) # todo: note that the generated config on the instruments will be done for both raw and parsed stream defs since these two data products constructed with each are associated as output data products with the instrument # todo: if the config is not generated for a stream def, then the instrument agent will complain if the simulator generates data corresponding to a stream def that is not there in the stream config as a mentioned stream def self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=parsed_out_data_prod_id) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=raw_out_data_prod_id) log.debug("assigned instdevice id: %s to data product: %s", instDevice_id, raw_out_data_prod_id) #------------------------------------------------------------------------------------- # Create Instrument Agent Instance #------------------------------------------------------------------------------------- instAgentInstance_id = self._create_instrument_agent_instance(instAgent_id,instDevice_id ) #------------------------------------------------------------------------------------- # Launch InstrumentAgentInstance, connect to the resource agent client #------------------------------------------------------------------------------------- self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) self.addCleanup(self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=instAgentInstance_id) inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id) # Wait for instrument agent to spawn gate = ProcessStateGate(self.processdispatchclient.read_process, inst_agent_instance_obj.agent_process_id, ProcessStateEnum.RUNNING) self.assertTrue(gate.await(15), "The instrument agent instance did not spawn in 15 seconds") # Start a resource agent client to talk with the instrument agent. 
        self._ia_client = ResourceAgentClient(instDevice_id,
                                              to_name=inst_agent_instance_obj.agent_process_id,
                                              process=FakeProcess())

        #-------------------------------------------------------------------------------------
        # Set up the subscriber to catch the alert event
        #-------------------------------------------------------------------------------------
        def callback_for_alert(event, *args, **kwargs):
            log.debug("caught an alert: %s", event)
            self.catch_alert.put(event)

        self.event_subscriber = EventSubscriber(event_type='DeviceStatusAlertEvent',
                                                origin=instDevice_id,
                                                callback=callback_for_alert)
        self.event_subscriber.start()
        self.addCleanup(self.event_subscriber.stop)

        #-------------------------------------------------------------------------------------
        # Running the instrument....
        #-------------------------------------------------------------------------------------
        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        reply = self._ia_client.execute_agent(cmd)
        self.assertEqual(reply.status, 0)

        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        self.assertEqual(reply.status, 0)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("(L4-CI-SA-RQ-334): current state after sending go_active command %s", str(state))
        self.assertEqual(state, 'DRIVER_STATE_COMMAND')

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        self.assertEqual(reply.status, 0)

        cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
        retval = self._ia_client.execute_resource(cmd)

        got_bad_temp = False
        got_late_data = False
        runtime = 0
        starttime = time.time()
        caught_events = []
        while not (got_bad_temp and got_late_data) and runtime < 120:
            a = self.catch_alert.get(timeout=90)
            caught_events.append(a)
            if a.name == 'temperature_warning_interval' and \
               a.description == 'Temperature is below the normal range of 50.0 and above.':
                got_bad_temp = True
            if a.name == 'late_data_warning' and \
               a.description == 'Expected data has not arrived.':
                got_late_data = True
            runtime = time.time() - starttime

        log.debug("caught_events: %s", [c.name for c in caught_events])

        for c in caught_events:
            self.assertIn(c.name, ['temperature_warning_interval', 'late_data_warning'])
            self.assertEqual(c.origin, instDevice_id)
            self.assertEqual(c.type_, 'DeviceStatusAlertEvent')
            self.assertEqual(c.origin_type, 'InstrumentDevice')

        self.assertTrue(got_bad_temp)
        self.assertTrue(got_late_data)
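# For reference, the temperature alert configured above is a one-sided interval: with
# lower_bound=50.0 and lower_rel_op='<', a WARNING fires for any sample value below 50.0.
# The function below is a minimal sketch of that comparison only; it is not the production
# IntervalAlert class, just an illustration of the semantics the alert definition assumes.
def interval_alert_fires(value, lower_bound=50.0, lower_rel_op='<'):
    """Return True when a sample value violates the configured lower bound."""
    if lower_rel_op == '<':
        return value < lower_bound
    if lower_rel_op == '<=':
        return value <= lower_bound
    raise ValueError('unsupported relational operator: %s' % lower_rel_op)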
class TestActivateRSNVel3DInstrument(IonIntegrationTestCase): def setUp(self): # Start container super(TestActivateRSNVel3DInstrument, self).setUp() config = DotDict() self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml', config) # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubcli = PubsubManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dpclient = DataProductManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataretrieverclient = DataRetrieverServiceClient( node=self.container.node) self.dataset_management = DatasetManagementServiceClient() def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name + '_logger') producer_definition.executable = { 'module': 'ion.processes.data.stream_granule_logger', 'class': 'StreamGranuleLogger' } logger_procdef_id = self.processdispatchclient.create_process_definition( process_definition=producer_definition) configuration = { 'process': { 'stream_id': stream_id, } } pid = self.processdispatchclient.schedule_process( process_definition_id=logger_procdef_id, configuration=configuration) return pid @attr('LOCOINT') @unittest.skip('under construction') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') @patch.dict(CFG, {'endpoint': {'receive': {'timeout': 180}}}) def test_activate_rsn_vel3d(self): log.info( "--------------------------------------------------------------------------------------------------------" ) # load_parameter_scenarios self.container.spawn_process( "Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict( op="load", scenario="BETA", path="master", categories= "ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition", clearcols="owner_id,org_ids", assets="res/preload/r2_ioc/ooi_assets", parseooi="True", )) self.loggerpids = [] # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='Vel3DMModel', description="Vel3DMModel") instModel_id = self.imsclient.create_instrument_model(instModel_obj) log.debug('test_activate_rsn_vel3d new InstrumentModel id = %s ', instModel_id) raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw') vel3d_b_sample = StreamConfiguration( stream_name='vel3d_b_sample', parameter_dictionary_name='vel3d_b_sample') vel3d_b_engineering = StreamConfiguration( stream_name='vel3d_b_engineering', parameter_dictionary_name='vel3d_b_engineering') RSN_VEL3D_01 = { 'DEV_ADDR': "10.180.80.6", 'DEV_PORT': 2101, 'DATA_PORT': 1026, 'CMD_PORT': 1025, 'PA_BINARY': "port_agent" } # Create InstrumentAgent instAgent_obj = IonObject( RT.InstrumentAgent, name='Vel3DAgent', description="Vel3DAgent", driver_uri= "http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg", stream_configurations=[ raw_config, vel3d_b_sample, vel3d_b_engineering ]) instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) 
log.debug('test_activate_rsn_vel3d new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent( instModel_id, instAgent_id) # Create InstrumentDevice log.debug( 'test_activate_rsn_vel3d: Create instrument resource to represent the Vel3D ' ) instDevice_obj = IonObject(RT.InstrumentDevice, name='Vel3DDevice', description="Vel3DDevice", serial_number="12345") instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) log.debug("test_activate_rsn_vel3d: new InstrumentDevice id = %s ", instDevice_id) port_agent_config = { 'device_addr': '10.180.80.6', 'device_port': 2101, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': 1025, 'data_port': 1026, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='Vel3DAgentInstance', description="Vel3DAgentInstance", port_agent_config=port_agent_config, alerts=[]) instAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, instDevice_id) parsed_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'vel3d_b_sample', id_only=True) parsed_sample_stream_def_id = self.pubsubcli.create_stream_definition( name='vel3d_b_sample', parameter_dictionary_id=parsed_sample_pdict_id) parsed_eng_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'vel3d_b_engineering', id_only=True) parsed_eng_stream_def_id = self.pubsubcli.create_stream_definition( name='vel3d_b_engineering', parameter_dictionary_id=parsed_eng_pdict_id) raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'raw', id_only=True) raw_stream_def_id = self.pubsubcli.create_stream_definition( name='raw', parameter_dictionary_id=raw_pdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- dp_obj = IonObject(RT.DataProduct, name='vel3d_b_sample', description='vel3d_b_sample') sample_data_product_id = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_sample_stream_def_id) log.debug('new dp_id = %s', sample_data_product_id) self.dpclient.activate_data_product_persistence( data_product_id=sample_data_product_id) self.damsclient.assign_data_product( input_resource_id=instDevice_id, data_product_id=sample_data_product_id) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(sample_data_product_id, PRED.hasStream, None, True) log.debug('sample_data_product streams1 = %s', stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(sample_data_product_id, PRED.hasDataset, RT.Dataset, True) log.debug('Data set for sample_data_product = %s', dataset_ids[0]) self.parsed_dataset = dataset_ids[0] pid = self.create_logger('vel3d_b_sample', stream_ids[0]) self.loggerpids.append(pid) dp_obj = IonObject(RT.DataProduct, name='vel3d_b_engineering', description='vel3d_b_engineering') eng_data_product_id = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_eng_stream_def_id) log.debug('new dp_id = %s', eng_data_product_id) self.dpclient.activate_data_product_persistence( data_product_id=eng_data_product_id) self.damsclient.assign_data_product( input_resource_id=instDevice_id, 
                                            data_product_id=eng_data_product_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test')

        data_product_id2 = self.dpclient.create_data_product(data_product=dp_obj,
                                                             stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', data_product_id2)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True)
        log.debug('test_activate_rsn_vel3d Data product streams2 = %s', str(stream_ids))

        # Retrieve the id of the OUTPUT dataset from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasDataset, RT.Dataset, True)
        log.debug('test_activate_rsn_vel3d Data set for data_product_id2 = %s', dataset_ids[0])
        self.raw_dataset = dataset_ids[0]

        def start_instrument_agent():
            self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        gevent.joinall([gevent.spawn(start_instrument_agent)])

        # cleanup
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        # wait for start
        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     instDevice_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30),
                        "The instrument agent instance (%s) did not spawn in 30 seconds" % gate.process_id)

        #log.trace('Instrument agent instance obj: = %s', str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(instDevice_id,
                                              to_name=gate.process_id,
                                              process=FakeProcess())

        def check_state(label, desired_state):
            actual_state = self._ia_client.get_agent_state()
            log.debug("%s instrument agent is in state '%s'", label, actual_state)
            self.assertEqual(desired_state, actual_state)

        log.debug("test_activate_rsn_vel3d: got ia client %s", str(self._ia_client))
        check_state("just-spawned", ResourceAgentState.UNINITIALIZED)

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activate_rsn_vel3d: initialize %s", str(retval))
        check_state("initialized", ResourceAgentState.INACTIVE)

        log.debug("test_activate_rsn_vel3d Sending go_active command")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activate_rsn_vel3d: return value from go_active %s", str(reply))
        check_state("activated", ResourceAgentState.IDLE)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("current state after sending go_active command %s", str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activate_rsn_vel3d: run %s", str(reply))
        check_state("commanded", ResourceAgentState.COMMAND)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("current state after sending run command %s", str(state))

        # cmd = AgentCommand(command=ProtocolEvent.START_AUTOSAMPLE)
        # reply = self._ia_client.execute_agent(cmd)
        # log.debug("test_activate_rsn_vel3d: run %s", str(reply))
        # state = self._ia_client.get_agent_state()
        # self.assertEqual(ResourceAgentState.COMMAND, state)
        #
        # gevent.sleep(5)
        #
        # cmd = AgentCommand(command=ProtocolEvent.STOP_AUTOSAMPLE)
        # reply = self._ia_client.execute_agent(cmd)
        # log.debug("test_activate_rsn_vel3d: run %s", str(reply))
        # state = self._ia_client.get_agent_state()
        # self.assertEqual(ResourceAgentState.COMMAND, state)
        #
        # cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        # retval = self._ia_client.execute_agent(cmd)
        # state = retval.result
        # log.debug("current state after sending STOP_AUTOSAMPLE command %s", str(state))
        #
        # cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
        # retval = self._ia_client.execute_agent(cmd)
        # state = self._ia_client.get_agent_state()
        # self.assertEqual(ResourceAgentState.STOPPED, state)
        #
        # cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
        # retval = self._ia_client.execute_agent(cmd)
        # state = self._ia_client.get_agent_state()
        # self.assertEqual(ResourceAgentState.COMMAND, state)
        #
        # cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
        # retval = self._ia_client.execute_agent(cmd)
        # state = self._ia_client.get_agent_state()
        # self.assertEqual(ResourceAgentState.IDLE, state)
        #
        # cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        # retval = self._ia_client.execute_agent(cmd)
        # state = self._ia_client.get_agent_state()
        # self.assertEqual(ResourceAgentState.COMMAND, state)

        log.debug("test_activate_rsn_vel3d: calling reset")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activate_rsn_vel3d: return from reset %s", str(reply))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to retrieve
#-------------------------------------------------------------------------------- replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset) self.assertIsInstance(replay_data_raw, Granule) rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw) log.debug("RDT raw: %s", str(rdt_raw.pretty_print())) self.assertIn('raw', rdt_raw) raw_vals = rdt_raw['raw'] #-------------------------------------------------------------------------------- # Deactivate loggers #-------------------------------------------------------------------------------- for pid in self.loggerpids: self.processdispatchclient.cancel_process(pid) self.dpclient.delete_data_product(sample_data_product_id) self.dpclient.delete_data_product(eng_data_product_id) self.dpclient.delete_data_product(data_product_id2)
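# Several tests in this module walk an instrument agent through the same
# INITIALIZE -> GO_ACTIVE -> RUN sequence and assert the agent state after each step.
# The helper below sketches that shared sequence; the function name is an assumption
# introduced for illustration, but the commands and expected states are taken directly
# from the tests above.
def bring_agent_to_command(ia_client, test_case):
    """Drive a resource agent from UNINITIALIZED to COMMAND, asserting each state."""
    steps = [(ResourceAgentEvent.INITIALIZE, ResourceAgentState.INACTIVE),
             (ResourceAgentEvent.GO_ACTIVE, ResourceAgentState.IDLE),
             (ResourceAgentEvent.RUN, ResourceAgentState.COMMAND)]
    for event, expected_state in steps:
        ia_client.execute_agent(AgentCommand(command=event))
        test_case.assertEqual(ia_client.get_agent_state(), expected_state)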
class TestTransformWorker(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Instantiate a process to represent the test
        process = TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)
        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

    def push_granule(self, data_product_id):
        '''
        Publishes and monitors that the granule arrived
        '''
        datasets, _ = self.rrclient.find_objects(data_product_id, PRED.hasDataset, id_only=True)
        dataset_monitor = DatasetMonitor(datasets[0])

        rdt = self.ph.rdt_for_data_product(data_product_id)
        self.ph.fill_parsed_rdt(rdt)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)

        assert dataset_monitor.wait()
        dataset_monitor.stop()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched,
        # verify that the output granule fields are correctly populated, and
        # test that the input and output data products are linked to facilitate provenance
        self.dp_list = []
        self.data_process_objs = []
        self._output_stream_ids = []
        self.granule_verified = Event()
        self.worker_assigned_event_verified = Event()
        self.dp_created_event_verified = Event()
        self.heartbeat_event_verified = Event()

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream',
                                 temporal_domain=self.time_dom.dump(),
                                 spatial_domain=self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,
                                                                      stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        self.start_event_listener()

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.dp_list.append(dataprocess_id)

        # validate that the repository for data product algorithms persists the new resources (NEW SA-1);
        # create_data_process created one of each, and there will be more than one DPD because of
        # the DPDs that represent the PFs in the data product above
        dpd_ids, _ = self.rrclient.find_resources(restype=OT.DataProcessDefinition, id_only=False)
        self.assertTrue(dpd_ids is not None)

        # only one DataProcess, because the PFs in the data product above are not activated yet
        dp_ids, _ = self.rrclient.find_resources(restype=OT.DataProcess, id_only=False)
        self.assertEquals(len(dp_ids), 1)

        # validate the name and version label (NEW SA-2)
        dataprocessdef_obj = self.dataprocessclient.read_data_process_definition(dataprocessdef_id)
        self.assertEqual(dataprocessdef_obj.version_label, '1.0a')
        self.assertEqual(dataprocessdef_obj.name, 'add_arrays')

        # validate that the DPD has an attachment (NEW SA-21)
        attachment_ids, assoc_ids = self.rrclient.find_objects(dataprocessdef_id, PRED.hasAttachment, RT.Attachment, True)
        self.assertEqual(len(attachment_ids), 1)
        attachment_obj = self.rrclient.read_attachment(attachment_ids[0])
        log.debug('attachment: %s', attachment_obj)

        # validate that the data process resource has input and output data products associated
        # L4-CI-SA-RQ-364 and NEW SA-3
        outproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasOutputProduct, RT.DataProduct, True)
        self.assertEqual(len(outproduct_ids), 1)
        inproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasInputProduct, RT.DataProduct, True)
        self.assertEqual(len(inproduct_ids), 1)

        # Test for provenance. Get the data product produced by the data process
        output_data_product_id, _ = self.rrclient.find_objects(subject=dataprocess_id,
                                                               object_type=RT.DataProduct,
                                                               predicate=PRED.hasOutputProduct,
                                                               id_only=True)
        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check of the provenance graph: two entries, parent and child, with the
        # DataProcessDefinition that created the child recorded on the parent link
        self.assertTrue(len(output_data_product_provenance) == 2)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(output_data_product_provenance[output_data_product_id[0]]['parents'][self.input_dp_id]['data_process_definition_id'] == dataprocessdef_id)

        # NEW SA-4 | Data processing shall include the appropriate data product algorithm name and version number in
        # the metadata of each output data product created by the data product algorithm.
        output_data_product_obj, _ = self.rrclient.find_objects(subject=dataprocess_id,
                                                                object_type=RT.DataProduct,
                                                                predicate=PRED.hasOutputProduct,
                                                                id_only=False)
        self.assertTrue(output_data_product_obj[0].name is not None)
        self.assertTrue(output_data_product_obj[0]._rev is not None)

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=dataprocess_id,
                                                          predicate=PRED.hasSubscription,
                                                          object_type=RT.Subscription,
                                                          id_only=False)
        log.debug('test_transform_worker subscription_obj: %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription',
                                                                      stream_ids=[self.stream_id],
                                                                      exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route)

        for n in range(1, 101):
            rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
            rdt['time'] = [0]  # time should always come first
            rdt['conductivity'] = [1]
            rdt['pressure'] = [2]
            rdt['salinity'] = [8]
            self.publisher.publish(rdt.to_granule())

        # validate that the output granule is received and the updated value is correct
        self.assertTrue(self.granule_verified.wait(self.wait_time))

        # validate that the data-process-loaded-into-worker event is received (L4-CI-SA-RQ-182)
        self.assertTrue(self.worker_assigned_event_verified.wait(self.wait_time))

        # validate that the data process created (with data product ids) event is received (NEW SA-42)
        self.assertTrue(self.dp_created_event_verified.wait(self.wait_time))

        # validate that the data process heartbeat event is received (one for every hundred granules
        # processed) (L4-CI-SA-RQ-182); this takes a while, so use a large wait limit
        self.assertTrue(self.heartbeat_event_verified.wait(200))

        # validate that the code of the transform function can be retrieved via inspect_data_process_definition
        src = self.dataprocessclient.inspect_data_process_definition(dataprocessdef_id)
        self.assertIn('def add_arrays(a, b)', src)

        # now delete the DPD and DP, then verify that the resources are retired so that the information
        # required for provenance is still available
        self.dataprocessclient.delete_data_process(dataprocess_id)
        self.dataprocessclient.delete_data_process_definition(dataprocessdef_id)

        in_dp_objs, _ = self.rrclient.find_objects(subject=dataprocess_id,
                                                   predicate=PRED.hasInputProduct,
                                                   object_type=RT.DataProduct,
                                                   id_only=True)
        self.assertTrue(in_dp_objs is not None)

        dpd_objs, _ = self.rrclient.find_subjects(subject_type=RT.DataProcessDefinition,
                                                  predicate=PRED.hasDataProcess,
                                                  object=dataprocess_id,
                                                  id_only=True)
        self.assertTrue(dpd_objs is not None)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_instrumentdevice(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched,
# verify that the output granule fields are correctly populated # test that the input and output data products are linked to facilitate provenance self.data_process_objs = [] self._output_stream_ids = [] self.event_verified = Event() # Create CTD Parsed as the initial data product # create a stream definition for the data from the ctd simulator self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id) # create the DataProduct that is the input to the data processes input_dp_obj = IonObject( RT.DataProduct, name='input_data_product', description='input test stream', temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump()) self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id) # retrieve the Stream for this data product stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True) self.stream_id = stream_ids[0] log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id) # only ever need one device for testing purposes. instDevice_obj,_ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='test_ctd_device') if instDevice_obj: instDevice_id = instDevice_obj[0]._id else: instDevice_obj = IonObject(RT.InstrumentDevice, name='test_ctd_device', description="test_ctd_device", serial_number="12345" ) instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=self.input_dp_id) # create the DPD, DataProcess and output DataProduct dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process() self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id) self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id) # Test for provenance. Get Data product produced by the data processes output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True) output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0]) # Do a basic check to see if there were 3 entries in the provenance graph. Parent and Child and the # DataProcessDefinition creating the child from the parent. self.assertTrue(len(output_data_product_provenance) == 3) self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents']) self.assertTrue(instDevice_id in output_data_product_provenance[self.input_dp_id]['parents']) self.assertTrue(output_data_product_provenance[instDevice_id]['type'] == 'InstrumentDevice') @attr('LOCOINT') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') def test_transform_worker_with_platformdevice(self): # test that a data process (type: data-product-in / data-product-out) can be defined and launched. 
# verify that the output granule fields are correctly populated # test that the input and output data products are linked to facilitate provenance self.data_process_objs = [] self._output_stream_ids = [] self.event_verified = Event() # Create CTD Parsed as the initial data product # create a stream definition for the data from the ctd simulator self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id) # create the DataProduct that is the input to the data processes input_dp_obj = IonObject( RT.DataProduct, name='input_data_product', description='input test stream', temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump()) self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id) # retrieve the Stream for this data product stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True) self.stream_id = stream_ids[0] log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id) # only ever need one device for testing purposes. platform_device_obj,_ = self.rrclient.find_resources(restype=RT.PlatformDevice, name='TestPlatform') if platform_device_obj: platform_device_id = platform_device_obj[0]._id else: platform_device_obj = IonObject(RT.PlatformDevice, name='TestPlatform', description="TestPlatform", serial_number="12345" ) platform_device_id = self.imsclient.create_platform_device(platform_device=platform_device_obj) self.damsclient.assign_data_product(input_resource_id=platform_device_id, data_product_id=self.input_dp_id) # create the DPD, DataProcess and output DataProduct dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process() self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id) self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id) # Test for provenance. Get Data product produced by the data processes output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True) output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0]) # Do a basic check to see if there were 3 entries in the provenance graph. Parent and Child and the # DataProcessDefinition creating the child from the parent. self.assertTrue(len(output_data_product_provenance) == 3) self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents']) self.assertTrue(platform_device_id in output_data_product_provenance[self.input_dp_id]['parents']) self.assertTrue(output_data_product_provenance[platform_device_id]['type'] == 'PlatformDevice') @attr('LOCOINT') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') def test_event_transform_worker(self): self.data_process_objs = [] self._output_stream_ids = [] self.event_verified = Event() # test that a data process (type: data-product-in / event-out) can be defined and launched. 
        # verify that event fields are correctly populated
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct
        input_dp_obj = IonObject(RT.DataProduct,
                                 name='input_data_product',
                                 description='input test stream',
                                 temporal_domain=self.time_dom.dump(),
                                 spatial_domain=self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj,
                                                                      stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        # create the DPD and two DPs
        self.event_data_process_id = self.create_event_data_processes()

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=self.event_data_process_id,
                                                          predicate=PRED.hasSubscription,
                                                          object_type=RT.Subscription,
                                                          id_only=False)
        log.debug('test_event_transform_worker subscription_obj: %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription',
                                                                      stream_ids=[self.stream_id],
                                                                      exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route)

        self.start_event_transform_listener()

        self.data_modified = Event()

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = [0]  # time should always come first
        rdt['conductivity'] = [1]
        rdt['pressure'] = [2]
        rdt['salinity'] = [8]
        self.publisher.publish(rdt.to_granule())

        self.assertTrue(self.event_verified.wait(self.wait_time))

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_bad_argument_map(self):
        self._output_stream_ids = []

        # test that a data process (type: data-product-in / data-product-out) parameter mapping is validated during
        # data process creation and that the correct exception is raised for both input and output.
self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True) # create the StreamDefinition self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id) self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id) # create the DataProduct that is the input to the data processes input_dp_obj = IonObject( RT.DataProduct, name='input_data_product', description='input test stream', temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump()) self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id) # two data processes using one transform and one DPD dp1_func_output_dp_id = self.create_output_data_product() # Set up DPD and DP #2 - array add function tf_obj = IonObject(RT.TransformFunction, name='add_array_func', description='adds values in an array', function='add_arrays', module="ion_example.add_arrays", arguments=['arr1', 'arr2'], function_type=TransformFunctionType.TRANSFORM, uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg' ) add_array_func_id, rev = self.rrclient.create(tf_obj) dpd_obj = IonObject(RT.DataProcessDefinition, name='add_arrays', description='adds the values of two arrays', data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS ) add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func' ) # create the data process with invalid argument map argument_map = {"arr1": "foo", "arr2": "bar"} output_param = "salinity" with self.assertRaises(BadRequest) as cm: dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param) ex = cm.exception log.debug(' exception raised: %s', cm) self.assertEqual(ex.message, "Input data product does not contain the parameters defined in argument map") # create the data process with invalid output parameter name argument_map = {"arr1": "conductivity", "arr2": "pressure"} output_param = "foo" with self.assertRaises(BadRequest) as cm: dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param) ex = cm.exception log.debug(' exception raised: %s', cm) self.assertEqual(ex.message, "Output data product does not contain the output parameter name provided") def create_event_data_processes(self): # two data processes using one transform and one DPD argument_map= {"a": "salinity"} # set up DPD and DP #2 - array add function tf_obj = IonObject(RT.TransformFunction, name='validate_salinity_array', description='validate_salinity_array', function='validate_salinity_array', module="ion.processes.data.transforms.test.test_transform_worker", arguments=['a'], function_type=TransformFunctionType.TRANSFORM ) add_array_func_id, rev = self.rrclient.create(tf_obj) dpd_obj = IonObject(RT.DataProcessDefinition, name='validate_salinity_array', description='validate_salinity_array', data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS, ) add_array_dpd_id = 
self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='validate_salinity_array' ) # create the data process dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=None, argument_map=argument_map) self.damsclient.register_process(dp1_data_process_id) self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id) return dp1_data_process_id def create_data_process(self): # two data processes using one transform and one DPD dp1_func_output_dp_id = self.create_output_data_product() argument_map = {"arr1": "conductivity", "arr2": "pressure"} output_param = "salinity" # set up DPD and DP #2 - array add function tf_obj = IonObject(RT.TransformFunction, name='add_array_func', description='adds values in an array', function='add_arrays', module="ion_example.add_arrays", arguments=['arr1', 'arr2'], function_type=TransformFunctionType.TRANSFORM, uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg' ) add_array_func_id, rev = self.rrclient.create(tf_obj) dpd_obj = IonObject(RT.DataProcessDefinition, name='add_arrays', description='adds the values of two arrays', data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS, version_label='1.0a' ) add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func' ) # create the data process dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param) self.damsclient.register_process(dp1_data_process_id) #self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id) # add an attachment object to this DPD to test new SA-21 import msgpack attachment_content = 'foo bar' attachment_obj = IonObject( RT.Attachment, name='test_attachment', attachment_type=AttachmentType.ASCII, content_type='text/plain', content=msgpack.packb(attachment_content)) att_id = self.rrclient.create_attachment(add_array_dpd_id, attachment_obj) self.addCleanup(self.rrclient.delete_attachment, att_id) return add_array_dpd_id, dp1_data_process_id, dp1_func_output_dp_id def create_output_data_product(self): dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition(name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id) dp1_output_dp_obj = IonObject( RT.DataProduct, name='data_process1_data_product', description='output of add array func', temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump()) dp1_func_output_dp_id = self.dataproductclient.create_data_product(dp1_output_dp_obj, dp1_outgoing_stream_id) self.addCleanup(self.dataproductclient.delete_data_product, dp1_func_output_dp_id) # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, None, True) self._output_stream_ids.append(stream_ids[0]) subscription_id = self.pubsub_client.create_subscription('validator', data_product_ids=[dp1_func_output_dp_id]) 
self.addCleanup(self.pubsub_client.delete_subscription, subscription_id) def on_granule(msg, route, stream_id): log.debug('recv_packet stream_id: %s route: %s msg: %s', stream_id, route, msg) self.validate_output_granule(msg, route, stream_id) self.granule_verified.set() validator = StandaloneStreamSubscriber('validator', callback=on_granule) validator.start() self.addCleanup(validator.stop) self.pubsub_client.activate_subscription(subscription_id) self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id) return dp1_func_output_dp_id def validate_event(self, *args, **kwargs): """ This method is a callback function for receiving DataProcessStatusEvent. """ data_process_event = args[0] log.debug("DataProcessStatusEvent: %s" , str(data_process_event.__dict__)) # if data process already created, check origin if self.dp_list: self.assertIn( data_process_event.origin, self.dp_list) # if this is a heartbeat event then 100 granules have been processed if 'data process status update.' in data_process_event.description: self.heartbeat_event_verified.set() else: # else check that this is the assign event if 'Data process assigned to transform worker' in data_process_event.description: self.worker_assigned_event_verified.set() elif 'Data process created for data product' in data_process_event.description: self.dp_created_event_verified.set() def validate_output_granule(self, msg, route, stream_id): self.assertIn( stream_id, self._output_stream_ids) rdt = RecordDictionaryTool.load_from_granule(msg) log.debug('validate_output_granule rdt: %s', rdt) sal_val = rdt['salinity'] np.testing.assert_array_equal(sal_val, np.array([3])) def start_event_listener(self): es = EventSubscriber(event_type=OT.DataProcessStatusEvent, callback=self.validate_event) es.start() self.addCleanup(es.stop) def validate_transform_event(self, *args, **kwargs): """ This method is a callback function for receiving DataProcessStatusEvent. """ status_alert_event = args[0] np.testing.assert_array_equal(status_alert_event.origin, self.stream_id ) np.testing.assert_array_equal(status_alert_event.values, np.array([self.event_data_process_id])) log.debug("DeviceStatusAlertEvent: %s" , str(status_alert_event.__dict__)) self.event_verified.set() def start_event_transform_listener(self): es = EventSubscriber(event_type=OT.DeviceStatusAlertEvent, callback=self.validate_transform_event) es.start() self.addCleanup(es.stop) def test_download(self): egg_url = 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg' egg_path = TransformWorker.download_egg(egg_url) import pkg_resources pkg_resources.working_set.add_entry(egg_path) from ion_example.add_arrays import add_arrays a = add_arrays(1,2) self.assertEquals(a,3)
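# The transform exercised above lives in the ion_example egg downloaded by test_download;
# the tests only require that its source contains 'def add_arrays(a, b)', that
# add_arrays(1, 2) == 3, and that conductivity=[1] plus pressure=[2] yields salinity=[3].
# A function consistent with those checks is sketched below; the real egg may differ in detail.
def add_arrays(a, b):
    """Element-wise sum of two arrays (also works for plain numbers)."""
    import numpy as np
    return np.asarray(a) + np.asarray(b)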
class TestDataProcessWithLookupTable(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubclient = PubsubManagementServiceClient( node=self.container.node) self.ingestclient = IngestionManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.dataset_management = self.datasetclient self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) def test_lookupTableProcessing(self): #------------------------------- # Create InstrumentModel #------------------------------- instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") try: instModel_id = self.imsclient.create_instrument_model( instModel_obj) except BadRequest as ex: self.fail("failed to create new InstrumentModel: %s" % ex) log.info( 'test_createTransformsThenActivateInstrument: new InstrumentModel id = %s', instModel_id) #------------------------------- # Create InstrumentAgent #------------------------------- instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD) try: instAgent_id = self.imsclient.create_instrument_agent( instAgent_obj) except BadRequest as ex: self.fail("failed to create new InstrumentAgent: %s" % ex) log.info( 'test_createTransformsThenActivateInstrument: new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent( instModel_id, instAgent_id) #------------------------------- # Create InstrumentDevice and attachment for lookup table #------------------------------- instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345") try: instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) except BadRequest as ex: self.fail("failed to create new InstrumentDevice: %s" % ex) log.info( 'test_createTransformsThenActivateInstrument: new InstrumentDevice id = %s', instDevice_id) contents = "this is the lookup table contents, replace with a file..." 
        att = IonObject(RT.Attachment,
                        name='deviceLookupTable',
                        content=base64.encodestring(contents),
                        keywords=['DataProcessInput'],
                        attachment_type=AttachmentType.ASCII)
        deviceAttachment = self.rrclient.create_attachment(instDevice_id, att)
        log.info('TestDataProcessWithLookupTable: InstrumentDevice attachment id = %s', deviceAttachment)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------
        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance")
        self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)

        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_CDM',
                                                                       parameter_dictionary_id=pdict_id)
        log.info('TestDataProcessWithLookupTable: new Stream Definition id = %s', ctd_stream_def_id)

        log.info('Creating new CDM data product with a stream definition')

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='ctd_parsed',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)
        log.info('new ctd_parsed_data_product_id = %s', ctd_parsed_data_product)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)
        log.info('TestDataProcessWithLookupTable: Data product streams1 = %s', stream_ids)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        log.info('TestDataProcessWithLookupTable: Creating new RAW data product with a stream definition')
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW',
                                                                       parameter_dictionary_id=pdict_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='ctd_raw',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)
        log.info('new ctd_raw_data_product_id = %s', ctd_raw_data_product)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)
        log.info('Data product streams2 = %s', stream_ids)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProcessWithLookupTable: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
self.fail( "failed to create new ctd_L0_all data process definition: %s" % ex) contents = "this is the lookup table contents for L0 Conductivity - Temperature - Pressure: Data Process Definition, replace with a file..." att = IonObject(RT.Attachment, name='processDefinitionLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII) processDefinitionAttachment = self.rrclient.create_attachment( ctd_L0_all_dprocdef_id, att) log.debug( "TestDataProcessWithLookupTable: data process definition attachment id %s", str(processDefinitionAttachment)) processDefinitionAttachment_obj = self.rrclient.read( processDefinitionAttachment) log.debug( "TestDataProcessWithLookupTable: data process definition attachment obj %s", str(processDefinitionAttachment_obj)) #------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition( name='L0_Conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity') outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition( name='L0_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure') outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition( name='L0_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature') log.debug( "TestDataProcessWithLookupTable: create output data product L0 conductivity" ) ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L0_Conductivity', description='transform output conductivity', temporal_domain=tdom, spatial_domain=sdom) ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product( ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id) log.debug( "TestDataProcessWithLookupTable: create output data product L0 pressure" ) ctd_l0_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L0_Pressure', description='transform output pressure', temporal_domain=tdom, spatial_domain=sdom) ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product( ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id) log.debug( "TestDataProcessWithLookupTable: create output data product L0 temperature" ) ctd_l0_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L0_Temperature', description='transform output temperature', temporal_domain=tdom, spatial_domain=sdom) ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product( ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Create the data process #------------------------------- log.debug( "TestDataProcessWithLookupTable: create L0 all data_process start") try: in_prods = [] in_prods.append(ctd_parsed_data_product) out_prods = [ ctd_l0_conductivity_output_dp_id, ctd_l0_pressure_output_dp_id, ctd_l0_temperature_output_dp_id ] ctd_l0_all_data_process_id =
self.dataprocessclient.create_data_process( data_process_definition_id=ctd_L0_all_dprocdef_id, in_data_product_ids=in_prods, out_data_product_ids=out_prods) except BadRequest as ex: self.fail("failed to create new data process: %s" % ex) log.debug( "TestDataProcessWithLookupTable: create L0 all data_process return" ) data_process = self.rrclient.read(ctd_l0_all_data_process_id) process_ids, _ = self.rrclient.find_objects( subject=ctd_l0_all_data_process_id, predicate=PRED.hasProcess, object_type=RT.Process, id_only=True) self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0]) contents = "this is the lookup table contents for L0 Conductivity - Temperature - Pressure: Data Process, replace with a file..." att = IonObject(RT.Attachment, name='processLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII) processAttachment = self.rrclient.create_attachment( ctd_l0_all_data_process_id, att) log.info( 'TestDataProcessWithLookupTable: data process attachment id = %s', processAttachment)
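# The three lookup-table attachments in the test above repeat one pattern: base64-encode
# the table contents, wrap them in an RT.Attachment tagged 'DataProcessInput', and attach
# them to the target resource (device, process definition, or process). A minimal sketch
# of that pattern as a helper, using only the imports already in this module; the helper
# name _attach_lookup_table is hypothetical and not part of the original test.
def _attach_lookup_table(rrclient, resource_id, name, contents):
    # Attachment content is carried as base64-encoded ASCII text
    att = IonObject(RT.Attachment,
                    name=name,
                    content=base64.encodestring(contents),
                    keywords=['DataProcessInput'],
                    attachment_type=AttachmentType.ASCII)
    # create_attachment links the attachment to the resource and returns its id
    return rrclient.create_attachment(resource_id, att)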
class TestCTDTransformsIntegration(IonIntegrationTestCase): pdict_id = None def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container self.container.start_rel_from_url('res/deploy/r2deploy.yml') print 'started services' # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubclient = PubsubManagementServiceClient( node=self.container.node) self.ingestclient = IngestionManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.dataset_management = self.datasetclient def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name + '_logger') producer_definition.executable = { 'module': 'ion.processes.data.stream_granule_logger', 'class': 'StreamGranuleLogger' } logger_procdef_id = self.processdispatchclient.create_process_definition( process_definition=producer_definition) configuration = { 'process': { 'stream_id': stream_id, } } pid = self.processdispatchclient.schedule_process( process_definition_id=logger_procdef_id, configuration=configuration) return pid def _create_instrument_model(self): instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") instModel_id = self.imsclient.create_instrument_model(instModel_obj) return instModel_id def _create_instrument_agent(self, instModel_id): raw_config = StreamConfiguration( stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict') parsed_config = StreamConfiguration( stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict') instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations=[raw_config, parsed_config]) instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) self.imsclient.assign_instrument_model_to_instrument_agent( instModel_id, instAgent_id) return instAgent_id def _create_instrument_device(self, instModel_id): instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345") instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) log.debug( "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) return instDevice_id def _create_instrument_agent_instance(self, instAgent_id, instDevice_id): port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, 
name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config=port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, instDevice_id) return instAgentInstance_id def _create_param_dicts(self): tdom, sdom = time_series_domain() self.sdom = sdom.dump() self.tdom = tdom.dump() self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) def _create_input_data_products( self, ctd_stream_def_id, instDevice_id, ): dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain=self.tdom, spatial_domain=self.sdom) ctd_parsed_data_product = self.dataproductclient.create_data_product( dp_obj, ctd_stream_def_id) self.damsclient.assign_data_product( input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product) self.dataproductclient.activate_data_product_persistence( data_product_id=ctd_parsed_data_product) #--------------------------------------------------------------------------- # Retrieve the id of the OUTPUT stream from the out Data Product #--------------------------------------------------------------------------- stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True) pid = self.create_logger('ctd_parsed', stream_ids[0]) self.loggerpids.append(pid) #--------------------------------------------------------------------------- # Create CTD Raw as the second data product #--------------------------------------------------------------------------- if not self.pdict_id: self._create_param_dicts() raw_stream_def_id = self.pubsubclient.create_stream_definition( name='SBE37_RAW', parameter_dictionary_id=self.pdict_id) dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test', temporal_domain=self.tdom, spatial_domain=self.sdom) ctd_raw_data_product = self.dataproductclient.create_data_product( dp_obj, raw_stream_def_id) self.damsclient.assign_data_product( input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product) self.dataproductclient.activate_data_product_persistence( data_product_id=ctd_raw_data_product) #--------------------------------------------------------------------------- # Retrieve the id of the OUTPUT stream from the out Data Product #--------------------------------------------------------------------------- stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True) print 'Data product streams2 = ', stream_ids return ctd_parsed_data_product def _create_data_process_definitions(self): #------------------------------------------------------------------------------------- # L0 Conductivity - Temperature - Pressure: Data Process Definition #------------------------------------------------------------------------------------- dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') self.ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj)
#------------------------------------------------------------------------------------- # L1 Conductivity: Data Process Definition #------------------------------------------------------------------------------------- dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L1_conductivity', description='create the L1 conductivity data product', module='ion.processes.data.transforms.ctd.ctd_L1_conductivity', class_name='CTDL1ConductivityTransform') self.ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) #------------------------------------------------------------------------------------- # L1 Pressure: Data Process Definition #------------------------------------------------------------------------------------- dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L1_pressure', description='create the L1 pressure data product', module='ion.processes.data.transforms.ctd.ctd_L1_pressure', class_name='CTDL1PressureTransform') self.ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) #------------------------------------------------------------------------------------- # L1 Temperature: Data Process Definition #------------------------------------------------------------------------------------- dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L1_temperature', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L1_temperature', class_name='CTDL1TemperatureTransform') self.ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) #------------------------------------------------------------------------------------- # L2 Salinity: Data Process Definition #------------------------------------------------------------------------------------- dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L2_salinity', description='create the L2 salinity data product', module='ion.processes.data.transforms.ctd.ctd_L2_salinity', class_name='SalinityTransform') self.ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) #------------------------------------------------------------------------------------- # L2 Density: Data Process Definition #------------------------------------------------------------------------------------- dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L2_density', description='create the L2 density data product', module='ion.processes.data.transforms.ctd.ctd_L2_density', class_name='DensityTransform') self.ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) return self.ctd_L0_all_dprocdef_id, self.ctd_L1_conductivity_dprocdef_id,\ self.ctd_L1_pressure_dprocdef_id, self.ctd_L1_temperature_dprocdef_id, \ self.ctd_L2_salinity_dprocdef_id, self.ctd_L2_density_dprocdef_id def _create_stream_definitions(self): if not self.pdict_id: self._create_param_dicts() outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition( name='L0_Conductivity', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_conductivity_id, self.ctd_L0_all_dprocdef_id, binding='conductivity') outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition( name='L0_Pressure', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_pressure_id,
self.ctd_L0_all_dprocdef_id, binding='pressure') outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition( name='L0_Temperature', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_temperature_id, self.ctd_L0_all_dprocdef_id, binding='temperature') return outgoing_stream_l0_conductivity_id, outgoing_stream_l0_pressure_id, outgoing_stream_l0_temperature_id def _create_l0_output_data_products(self, outgoing_stream_l0_conductivity_id, outgoing_stream_l0_pressure_id, outgoing_stream_l0_temperature_id): out_data_prods = [] ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L0_Conductivity', description='transform output conductivity', temporal_domain=self.tdom, spatial_domain=self.sdom) self.ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product( ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id) out_data_prods.append(self.ctd_l0_conductivity_output_dp_id) self.dataproductclient.activate_data_product_persistence( data_product_id=self.ctd_l0_conductivity_output_dp_id) ctd_l0_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L0_Pressure', description='transform output pressure', temporal_domain=self.tdom, spatial_domain=self.sdom) self.ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product( ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id) out_data_prods.append(self.ctd_l0_pressure_output_dp_id) self.dataproductclient.activate_data_product_persistence( data_product_id=self.ctd_l0_pressure_output_dp_id) ctd_l0_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L0_Temperature', description='transform output temperature', temporal_domain=self.tdom, spatial_domain=self.sdom) self.ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product( ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id) out_data_prods.append(self.ctd_l0_temperature_output_dp_id) self.dataproductclient.activate_data_product_persistence( data_product_id=self.ctd_l0_temperature_output_dp_id) return out_data_prods def _create_l1_out_data_products(self): ctd_l1_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L1_Conductivity', description='transform output L1 conductivity', temporal_domain=self.tdom, spatial_domain=self.sdom) self.ctd_l1_conductivity_output_dp_id = self.dataproductclient.create_data_product( ctd_l1_conductivity_output_dp_obj, self.outgoing_stream_l1_conductivity_id) self.dataproductclient.activate_data_product_persistence( data_product_id=self.ctd_l1_conductivity_output_dp_id) # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects( self.ctd_l1_conductivity_output_dp_id, PRED.hasStream, None, True) pid = self.create_logger('ctd_l1_conductivity', stream_ids[0]) self.loggerpids.append(pid) ctd_l1_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L1_Pressure', description='transform output L1 pressure', temporal_domain=self.tdom, spatial_domain=self.sdom) self.ctd_l1_pressure_output_dp_id = self.dataproductclient.create_data_product( ctd_l1_pressure_output_dp_obj, self.outgoing_stream_l1_pressure_id) self.dataproductclient.activate_data_product_persistence( data_product_id=self.ctd_l1_pressure_output_dp_id) # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects( self.ctd_l1_pressure_output_dp_id, 
PRED.hasStream, None, True) pid = self.create_logger('ctd_l1_pressure', stream_ids[0]) self.loggerpids.append(pid) ctd_l1_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L1_Temperature', description='transform output L1 temperature', temporal_domain=self.tdom, spatial_domain=self.sdom) self.ctd_l1_temperature_output_dp_id = self.dataproductclient.create_data_product( ctd_l1_temperature_output_dp_obj, self.outgoing_stream_l1_temperature_id) self.dataproductclient.activate_data_product_persistence( data_product_id=self.ctd_l1_temperature_output_dp_id) # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects( self.ctd_l1_temperature_output_dp_id, PRED.hasStream, None, True) pid = self.create_logger('ctd_l1_temperature', stream_ids[0]) self.loggerpids.append(pid) def _create_l2_out_data_products(self): #------------------------------- # L2 Salinity - Density: Output Data Products #------------------------------- if not self.pdict_id: self._create_param_dicts() outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition( name='L2_Salinity', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l2_salinity_id, self.ctd_L2_salinity_dprocdef_id, binding='salinity') outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition( name='L2_Density', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l2_density_id, self.ctd_L2_density_dprocdef_id, binding='density') ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct, name='L2_Salinity', description='transform output L2 salinity', temporal_domain=self.tdom, spatial_domain=self.sdom) self.ctd_l2_salinity_output_dp_id = self.dataproductclient.create_data_product( ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id) self.dataproductclient.activate_data_product_persistence( data_product_id=self.ctd_l2_salinity_output_dp_id) # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects( self.ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True) pid = self.create_logger('ctd_l2_salinity', stream_ids[0]) self.loggerpids.append(pid) ctd_l2_density_output_dp_obj = IonObject( RT.DataProduct, name='L2_Density', description='transform output L2 density', temporal_domain=self.tdom, spatial_domain=self.sdom) self.ctd_l2_density_output_dp_id = self.dataproductclient.create_data_product( ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id) self.dataproductclient.activate_data_product_persistence( data_product_id=self.ctd_l2_density_output_dp_id) # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects( self.ctd_l2_density_output_dp_id, PRED.hasStream, None, True) pid = self.create_logger('ctd_l2_density', stream_ids[0]) self.loggerpids.append(pid) @unittest.skip('This test errors on coi-nightly, may be OBE.') def test_createTransformsThenActivateInstrument(self): self.loggerpids = [] #------------------------------------------------------------------------------------- # Create InstrumentModel #------------------------------------------------------------------------------------- instModel_id = self._create_instrument_model() #------------------------------------------------------------------------------------- # Create
InstrumentAgent #------------------------------------------------------------------------------------- instAgent_id = self._create_instrument_agent(instModel_id) #------------------------------------------------------------------------------------- # Create InstrumentDevice #------------------------------------------------------------------------------------- instDevice_id = self._create_instrument_device(instModel_id) #------------------------------------------------------------------------------------- # Create Instrument Agent Instance #------------------------------------------------------------------------------------- instAgentInstance_id = self._create_instrument_agent_instance( instAgent_id, instDevice_id) #------------------------------------------------------------------------------------- # create a stream definition for the data from the ctd simulator #------------------------------------------------------------------------------------- self._create_param_dicts() ctd_stream_def_id = self.pubsubclient.create_stream_definition( name='SBE37_CDM', parameter_dictionary_id=self.pdict_id) #------------------------------------------------------------------------------------- # Create two data products #------------------------------------------------------------------------------------- ctd_parsed_data_product = self._create_input_data_products( ctd_stream_def_id, instDevice_id) #------------------------------------------------------------------------------------- # Create data process definitions #------------------------------------------------------------------------------------- self._create_data_process_definitions() #------------------------------------------------------------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products #------------------------------------------------------------------------------------- outgoing_stream_l0_conductivity_id, \ outgoing_stream_l0_pressure_id, \ outgoing_stream_l0_temperature_id = self._create_stream_definitions() self.out_prod_ids = self._create_l0_output_data_products( outgoing_stream_l0_conductivity_id, outgoing_stream_l0_pressure_id, outgoing_stream_l0_temperature_id) self.outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition( name='L1_conductivity', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( self.outgoing_stream_l1_conductivity_id, self.ctd_L1_conductivity_dprocdef_id, binding='conductivity') self.outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition( name='L1_Pressure', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( self.outgoing_stream_l1_pressure_id, self.ctd_L1_pressure_dprocdef_id, binding='pressure') self.outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition( name='L1_Temperature', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( self.outgoing_stream_l1_temperature_id, self.ctd_L1_temperature_dprocdef_id, binding='temperature') self._create_l1_out_data_products() self._create_l2_out_data_products() #------------------------------------------------------------------------------------- # L0 Conductivity - Temperature - Pressure: Create the data process #------------------------------------------------------------------------------------- ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process( 
data_process_definition_id=self.ctd_L0_all_dprocdef_id, in_data_product_ids=[ctd_parsed_data_product], out_data_product_ids=self.out_prod_ids) self.dataprocessclient.activate_data_process( ctd_l0_all_data_process_id) data_process = self.rrclient.read(ctd_l0_all_data_process_id) process_ids, _ = self.rrclient.find_objects( subject=ctd_l0_all_data_process_id, predicate=PRED.hasProcess, id_only=True) self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0]) extended_process = self.dataprocessclient.get_data_process_extension( ctd_l0_all_data_process_id) self.assertEquals(extended_process.computed.operational_state.status, ComputedValueAvailability.NOTAVAILABLE) self.assertEquals(data_process.message_controllable, True) #------------------------------------------------------------------------------------- # L1 Conductivity: Create the data process #------------------------------------------------------------------------------------- l1_conductivity_data_process_id = self.dataprocessclient.create_data_process( self.ctd_L1_conductivity_dprocdef_id, [self.ctd_l0_conductivity_output_dp_id], [self.ctd_l1_conductivity_output_dp_id]) self.dataprocessclient.activate_data_process( l1_conductivity_data_process_id) data_process = self.rrclient.read(l1_conductivity_data_process_id) process_ids, _ = self.rrclient.find_objects( subject=l1_conductivity_data_process_id, predicate=PRED.hasProcess, id_only=True) self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0]) #------------------------------------------------------------------------------------- # L1 Pressure: Create the data process #------------------------------------------------------------------------------------- l1_pressure_data_process_id = self.dataprocessclient.create_data_process( self.ctd_L1_pressure_dprocdef_id, [self.ctd_l0_pressure_output_dp_id], [self.ctd_l1_pressure_output_dp_id]) self.dataprocessclient.activate_data_process( l1_pressure_data_process_id) data_process = self.rrclient.read(l1_pressure_data_process_id) process_ids, _ = self.rrclient.find_objects( subject=l1_pressure_data_process_id, predicate=PRED.hasProcess, id_only=True) self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0]) #------------------------------------------------------------------------------------- # L1 Temperature: Create the data process #------------------------------------------------------------------------------------- l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process( self.ctd_L1_temperature_dprocdef_id, [self.ctd_l0_temperature_output_dp_id], [self.ctd_l1_temperature_output_dp_id]) self.dataprocessclient.activate_data_process( l1_temperature_all_data_process_id) data_process = self.rrclient.read(l1_temperature_all_data_process_id) process_ids, _ = self.rrclient.find_objects( subject=l1_temperature_all_data_process_id, predicate=PRED.hasProcess, id_only=True) self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0]) #------------------------------------------------------------------------------------- # L2 Salinity: Create the data process #------------------------------------------------------------------------------------- l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process( self.ctd_L2_salinity_dprocdef_id, [ctd_parsed_data_product], [self.ctd_l2_salinity_output_dp_id]) self.dataprocessclient.activate_data_process( l2_salinity_all_data_process_id) data_process = self.rrclient.read(l2_salinity_all_data_process_id) 
process_ids, _ = self.rrclient.find_objects( subject=l2_salinity_all_data_process_id, predicate=PRED.hasProcess, id_only=True) self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0]) #------------------------------------------------------------------------------------- # L2 Density: Create the data process #------------------------------------------------------------------------------------- l2_density_all_data_process_id = self.dataprocessclient.create_data_process( self.ctd_L2_density_dprocdef_id, [ctd_parsed_data_product], [self.ctd_l2_density_output_dp_id]) self.dataprocessclient.activate_data_process( l2_density_all_data_process_id) data_process = self.rrclient.read(l2_density_all_data_process_id) process_ids, _ = self.rrclient.find_objects( subject=l2_density_all_data_process_id, predicate=PRED.hasProcess, id_only=True) self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0]) #------------------------------------------------------------------------------------- # Launch InstrumentAgentInstance, connect to the resource agent client #------------------------------------------------------------------------------------- self.imsclient.start_instrument_agent_instance( instrument_agent_instance_id=instAgentInstance_id) self.addCleanup(self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=instAgentInstance_id) inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance( instAgentInstance_id) # Wait for instrument agent to spawn gate = AgentProcessStateGate(self.processdispatchclient.read_process, instDevice_id, ProcessStateEnum.RUNNING) self.assertTrue( gate.await(15), "The instrument agent instance did not spawn in 15 seconds") # Start a resource agent client to talk with the instrument agent. self._ia_client = ResourceAgentClient(instDevice_id, to_name=gate.process_id, process=FakeProcess()) #------------------------------------------------------------------------------------- # Streaming #------------------------------------------------------------------------------------- cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) reply = self._ia_client.execute_agent(cmd) self.assertTrue(reply.status == 0) cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) reply = self._ia_client.execute_agent(cmd) self.assertTrue(reply.status == 0) cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE) retval = self._ia_client.execute_agent(cmd) state = retval.result log.debug( "(L4-CI-SA-RQ-334): current state after sending go_active command %s", str(state)) self.assertEqual(state, 'DRIVER_STATE_COMMAND') cmd = AgentCommand(command=ResourceAgentEvent.RUN) reply = self._ia_client.execute_agent(cmd) self.assertTrue(reply.status == 0) #todo ResourceAgentClient no longer has method set_param # # Make sure the sampling rate and transmission are sane. # params = { # SBE37Parameter.NAVG : 1, # SBE37Parameter.INTERVAL : 5, # SBE37Parameter.TXREALTIME : True # } # self._ia_client.set_param(params) #todo There is no ResourceAgentEvent attribute for go_streaming... so what should be the command for it?
cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE) retval = self._ia_client.execute_resource(cmd) # This gevent sleep is there to test the autosample time, which will show something different from default # only if the instrument runs for over a minute gevent.sleep(90) extended_instrument = self.imsclient.get_instrument_device_extension( instrument_device_id=instDevice_id) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) autosample_string = extended_instrument.computed.uptime.value autosampling_time = int(autosample_string.split()[4]) self.assertTrue(autosampling_time > 0) cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE) retval = self._ia_client.execute_resource(cmd) #todo There is no ResourceAgentEvent attribute for go_observatory... so what should be the command for it? # log.debug("test_activateInstrumentStream: calling go_observatory") # cmd = AgentCommand(command='go_observatory') # reply = self._ia_client.execute_agent(cmd) # cmd = AgentCommand(command='get_current_state') # retval = self._ia_client.execute_agent(cmd) # state = retval.result # log.debug("test_activateInstrumentStream: return from go_observatory state %s", str(state)) cmd = AgentCommand(command=ResourceAgentEvent.RESET) reply = self._ia_client.execute_agent(cmd) self.assertTrue(reply.status == 0) #------------------------------------------------------------------------------------------------- # Cleanup processes #------------------------------------------------------------------------------------------------- for pid in self.loggerpids: self.processdispatchclient.cancel_process(pid) #-------------------------------------------------------------------------------- # Cleanup data products #-------------------------------------------------------------------------------- dp_ids, _ = self.rrclient.find_resources(restype=RT.DataProduct, id_only=True) for dp_id in dp_ids: self.dataproductclient.delete_data_product(dp_id)
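# Each of the six transforms in the test above repeats the same steps: create the data
# process from its definition, activate it, look up the backing worker via the hasProcess
# association, and register a cleanup that cancels that worker. A minimal sketch of the
# pattern as a method that could live on this test class; the name _start_data_process is
# hypothetical and not part of the original test.
def _start_data_process(self, dprocdef_id, in_dp_ids, out_dp_ids):
    # Create and activate the data process for this definition
    data_process_id = self.dataprocessclient.create_data_process(
        dprocdef_id, in_dp_ids, out_dp_ids)
    self.dataprocessclient.activate_data_process(data_process_id)
    # The worker process hangs off the data process resource via hasProcess;
    # cancel it when the test tears down
    process_ids, _ = self.rrclient.find_objects(
        subject=data_process_id, predicate=PRED.hasProcess, id_only=True)
    self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])
    return data_process_id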
class TestExternalDatasetAgentMgmt(IonIntegrationTestCase): # DataHandler config DVR_CONFIG = { 'dvr_mod': 'ion.agents.data.handlers.base_data_handler', 'dvr_cls': 'DummyDataHandler', } def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') log.debug("TestExternalDatasetAgentMgmt: started services") # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubcli = PubsubManagementServiceClient( node=self.container.node) self.dpclient = DataProductManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.dataset_management = self.datasetclient @unittest.skip('not working') def test_activateDatasetAgent(self): # Create ExternalDatasetModel datasetModel_obj = IonObject(RT.ExternalDatasetModel, name='ExampleDatasetModel', description="ExampleDatasetModel", datset_type="FibSeries") try: datasetModel_id = self.damsclient.create_external_dataset_model( datasetModel_obj) except BadRequest as ex: self.fail("failed to create new ExternalDatasetModel: %s" % ex) log.debug( "TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id)) # Create ExternalDatasetAgent datasetAgent_obj = IonObject(RT.ExternalDatasetAgent, name='datasetagent007', description="datasetagent007", handler_module=EDA_MOD, handler_class=EDA_CLS) try: datasetAgent_id = self.damsclient.create_external_dataset_agent( datasetAgent_obj, datasetModel_id) except BadRequest as ex: self.fail("failed to create new ExternalDatasetAgent: %s" % ex) log.debug( "TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id)) # Create ExternalDataset log.debug( 'TestExternalDatasetAgentMgmt: Create external dataset resource ') extDataset_obj = IonObject(RT.ExternalDataset, name='ExtDataset', description="ExtDataset") try: extDataset_id = self.damsclient.create_external_dataset( extDataset_obj, datasetModel_id) except BadRequest as ex: self.fail("failed to create new external dataset resource: %s" % ex) log.debug( "TestExternalDatasetAgentMgmt: new ExternalDataset id = %s", str(extDataset_id)) #register the dataset as a data producer dproducer_id = self.damsclient.register_external_data_set( extDataset_id) # create a stream definition for the data from the ctd simulator pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition( name='SBE37_CDM', parameter_dictionary_id=pdict_id) log.debug( "TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id)) log.debug( "TestExternalDatasetAgentMgmt: Creating new data product with a stream definition" ) dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp') data_product_id1 = self.dpclient.create_data_product( dp_obj, ctd_stream_def_id) log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1)) self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1) #todo fix the problem here....
self.dpclient.activate_data_product_persistence( data_product_id=data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True) log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids)) stream_id = stream_ids[0] # Build a taxonomy for the dataset tx = TaxyTool() tx.add_taxonomy_set('data', 'external_data') # Augment the DVR_CONFIG with the necessary pieces self.DVR_CONFIG['dh_cfg'] = { 'TESTING': True, 'stream_id': stream_id, #TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members 'data_producer_id': dproducer_id, # 'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id 'taxonomy': tx.dump(), #TODO: Currently does not support sets 'max_records': 4, } # Create agent config. self._stream_config = {} agent_config = { 'driver_config': self.DVR_CONFIG, 'stream_config': self._stream_config, 'agent': { 'resource_id': EDA_RESOURCE_ID }, 'test_mode': True } extDatasetAgentInstance_obj = IonObject( RT.ExternalDatasetAgentInstance, name='DatasetAgentInstance', description="DatasetAgentInstance", dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=agent_config) extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance( external_dataset_agent_instance=extDatasetAgentInstance_obj, external_dataset_agent_id=datasetAgent_id, external_dataset_id=extDataset_id) log.debug( "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj)) log.debug( "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id)) #Check that the instance is currently not active id, active = self.damsclient.retrieve_external_dataset_agent_instance( extDataset_id) log.debug( "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s active 1 = %s ", str(id), str(active)) self.damsclient.start_external_dataset_agent_instance( extDatasetAgentInstance_id) dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance( extDatasetAgentInstance_id) log.debug( "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj)) # now the instance process should be active id, active = self.damsclient.retrieve_external_dataset_agent_instance( extDataset_id) log.debug( "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s active 2 = %s ", str(id), str(active)) # Start a resource agent client to talk with the instrument agent. 
self._dsa_client = ResourceAgentClient(extDataset_id, process=FakeProcess()) print 'TestExternalDatasetAgentMgmt: got ia client %s' % self._dsa_client log.debug("TestExternalDatasetAgentMgmt: got dataset client %s", str(self._dsa_client)) # cmd=AgentCommand(command='initialize') # _ = self._dsa_client.execute_agent(cmd) # # cmd = AgentCommand(command='go_active') # _ = self._dsa_client.execute_agent(cmd) # # cmd = AgentCommand(command='run') # _ = self._dsa_client.execute_agent(cmd) # # log.info('Send an unconstrained request for data (\'new data\')') # cmd = AgentCommand(command='acquire_data') # self._dsa_client.execute(cmd) # # log.info('Send a second unconstrained request for data (\'new data\'), should be rejected') # cmd = AgentCommand(command='acquire_data') # self._dsa_client.execute(cmd) # # cmd = AgentCommand(command='reset') # _ = self._dsa_client.execute_agent(cmd) # cmd = AgentCommand(command='get_current_state') # retval = self._dsa_client.execute_agent(cmd) # state = retval.result # TODO: Think about what we really should be testing at this point # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states() # TODO: Do we also need to show data retrieval? cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.UNINITIALIZED) cmd = AgentCommand(command='initialize') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.INACTIVE) cmd = AgentCommand(command='go_active') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.IDLE) cmd = AgentCommand(command='run') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.OBSERVATORY) cmd = AgentCommand(command='pause') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.STOPPED) cmd = AgentCommand(command='resume') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.OBSERVATORY) cmd = AgentCommand(command='clear') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.IDLE) cmd = AgentCommand(command='run') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.OBSERVATORY) cmd = AgentCommand(command='pause') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.STOPPED) cmd = AgentCommand(command='clear') retval = self._dsa_client.execute_agent(cmd) cmd =
AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.IDLE) cmd = AgentCommand(command='run') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.OBSERVATORY) cmd = AgentCommand(command='reset') retval = self._dsa_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._dsa_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.UNINITIALIZED) #------------------------------- # Deactivate InstrumentAgentInstance #------------------------------- self.damsclient.stop_external_dataset_agent_instance( extDatasetAgentInstance_id) def test_dataset_agent_prepare_support(self): eda_sup = self.damsclient.prepare_external_dataset_agent_support() eda_obj = IonObject(RT.ExternalDatasetAgent, name="ExternalDatasetAgent") eda_id = self.damsclient.create_external_dataset_agent(eda_obj) eda_sup = self.damsclient.prepare_external_dataset_agent_support( external_dataset_agent_id=eda_id) edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support( ) edai_obj = IonObject(RT.ExternalDatasetAgentInstance, name="ExternalDatasetAgentInstance") edai_id = self.damsclient.create_external_dataset_agent_instance( edai_obj) edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support( external_dataset_agent_instance_id=edai_id)
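# The state walk in test_activateDatasetAgent above repeats one cycle many times:
# execute a command, then read back get_current_state and compare. A minimal sketch
# of that cycle, assuming the AgentCommand and InstrumentAgentState imports this
# module already uses; the helper name _step_agent is hypothetical.
def _step_agent(dsa_client, command, expected_state):
    # Drive one FSM transition on the agent
    dsa_client.execute_agent(AgentCommand(command=command))
    # Read the agent's current FSM state and check it
    retval = dsa_client.execute_agent(AgentCommand(command='get_current_state'))
    assert retval.result == expected_state, retval.result

# e.g. _step_agent(self._dsa_client, 'initialize', InstrumentAgentState.INACTIVE)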
class TestBulkIngest(IonIntegrationTestCase): EDA_MOD = 'ion.agents.data.external_dataset_agent' EDA_CLS = 'ExternalDatasetAgent' def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataAcquisitionManagementService self.client = DataAcquisitionManagementServiceClient(node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsub_client = PubsubManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.data_retriever = DataRetrieverServiceClient(node=self.container.node) self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name) # Data async and subscription TODO: Replace with new subscriber self._finished_count = None #TODO: Switch to gevent.queue.Queue self._async_finished_result = AsyncResult() self._finished_events_received = [] self._finished_event_subscriber = None self._start_finished_event_subscriber() self.addCleanup(self._stop_finished_event_subscriber) self.DVR_CONFIG = {} self.DVR_CONFIG = { 'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler', 'dvr_cls' : 'SlocumDataHandler', } self._setup_resources() self.agent_config = { 'driver_config' : self.DVR_CONFIG, 'stream_config' : {}, 'agent' : {'resource_id': self.EDA_RESOURCE_ID}, 'test_mode' : True } datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance, name='ExternalDatasetAgentInstance1', description='external data agent instance', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS, dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=self.agent_config ) self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(external_dataset_agent_instance=datasetagent_instance_obj, external_dataset_agent_id=self.datasetagent_id, external_dataset_id=self.EDA_RESOURCE_ID) #TG: Setup/configure the granule logger to log granules as they're published pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id) dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id) print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj # Start a resource agent client to talk with the instrument agent. 
self._ia_client = ResourceAgentClient('datasetagentclient', name=pid, process=FakeProcess()) log.debug("TestBulkIngest: got ia client %s", str(self._ia_client)) def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name+'_logger') producer_definition.executable = { 'module':'ion.processes.data.stream_granule_logger', 'class':'StreamGranuleLogger' } logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition) configuration = { 'process':{ 'stream_id':stream_id, } } pid = self.processdispatchclient.schedule_process(process_definition_id=logger_procdef_id, configuration=configuration) return pid def _start_finished_event_subscriber(self): def consume_event(*args,**kwargs): log.debug('EventSubscriber event received: %s', str(args[0]) ) if args[0].description == 'TestingFinished': log.debug('TestingFinished event received') self._finished_events_received.append(args[0]) if self._finished_count and self._finished_count == len(self._finished_events_received): log.debug('Finishing test...') self._async_finished_result.set(len(self._finished_events_received)) log.debug('Called self._async_finished_result.set({0})'.format(len(self._finished_events_received))) self._finished_event_subscriber = EventSubscriber(event_type='DeviceEvent', callback=consume_event) self._finished_event_subscriber.start() def _stop_finished_event_subscriber(self): if self._finished_event_subscriber: self._finished_event_subscriber.stop() self._finished_event_subscriber = None def tearDown(self): pass @unittest.skip('Update to agent refactor.') def test_slocum_data_ingest(self): HIST_CONSTRAINTS_1 = {} # Test instrument driver execute interface to start and stop streaming mode.
cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.UNINITIALIZED) cmd = AgentCommand(command='initialize') retval = self._ia_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.INACTIVE) cmd = AgentCommand(command='go_active') retval = self._ia_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.IDLE) cmd = AgentCommand(command='run') retval = self._ia_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.OBSERVATORY) # Make sure the polling interval is appropriate for a test params = { 'POLLING_INTERVAL': 3 } self._ia_client.set_param(params) self._finished_count = 1 cmd = AgentCommand(command='acquire_data') self._ia_client.execute(cmd) # Assert that data was received self._async_finished_result.get(timeout=15) self.assertTrue(len(self._finished_events_received) >= 1) cmd = AgentCommand(command='reset') retval = self._ia_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.UNINITIALIZED) #todo enable after Luke's mor to retrieve, right now must have the Time axis called 'time' # replay_granule = self.data_retriever.retrieve_last_data_points(self.dataset_id, 10) # rdt = RecordDictionaryTool.load_from_granule(replay_granule) # # comp = rdt['date_pattern'] == numpy.arange(10) + 10 # # log.debug("TestBulkIngest: comp: %s", comp) # # self.assertTrue(comp.all()) for pid in self.loggerpids: self.processdispatchclient.cancel_process(pid) def _setup_resources(self): self.loggerpids = [] # Create DataProvider dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation()) dprov.contact.name = 'Christopher Mueller' dprov.contact.email = '*****@*****.**' # Create DataSetModel dataset_model = ExternalDatasetModel(name='slocum_model') dataset_model.datset_type = 'SLOCUM' dataset_model_id = self.dams_client.create_external_dataset_model(dataset_model) # Create ExternalDataset ds_name = 'slocum_test_dataset' dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation()) dset.dataset_description.parameters['base_url'] = 'test_data/slocum/' dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat' dset.dataset_description.parameters['date_pattern'] = '%Y %j' dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat' dset.dataset_description.parameters['temporal_dimension'] = None dset.dataset_description.parameters['zonal_dimension'] = None dset.dataset_description.parameters['meridional_dimension'] = None dset.dataset_description.parameters['vertical_dimension'] = None dset.dataset_description.parameters['variables'] = [ 'c_wpt_y_lmc', 'sci_water_cond', 'm_y_lmc', 'u_hd_fin_ap_inflection_holdoff', 'sci_m_present_time', 'm_leakdetect_voltage_forward', 'sci_bb3slo_b660_scaled', 'c_science_send_all', 'm_gps_status', 'm_water_vx', 'm_water_vy', 'c_heading', 'sci_fl3slo_chlor_units', 'u_hd_fin_ap_gain', 
'm_vacuum', 'u_min_water_depth', 'm_gps_lat', 'm_veh_temp', 'f_fin_offset', 'u_hd_fin_ap_hardover_holdoff', 'c_alt_time', 'm_present_time', 'm_heading', 'sci_bb3slo_b532_scaled', 'sci_fl3slo_cdom_units', 'm_fin', 'x_cycle_overrun_in_ms', 'sci_water_pressure', 'u_hd_fin_ap_igain', 'sci_fl3slo_phyco_units', 'm_battpos', 'sci_bb3slo_b470_scaled', 'm_lat', 'm_gps_lon', 'sci_ctd41cp_timestamp', 'm_pressure', 'c_wpt_x_lmc', 'c_ballast_pumped', 'x_lmc_xy_source', 'm_lon', 'm_avg_speed', 'sci_water_temp', 'u_pitch_ap_gain', 'm_roll', 'm_tot_num_inflections', 'm_x_lmc', 'u_pitch_ap_deadband', 'm_final_water_vy', 'm_final_water_vx', 'm_water_depth', 'm_leakdetect_voltage', 'u_pitch_max_delta_battpos', 'm_coulomb_amphr', 'm_pitch', ] ## Create the external dataset ds_id = self.dams_client.create_external_dataset(external_dataset=dset, external_dataset_model_id=dataset_model_id) ext_dprov_id = self.dams_client.create_external_data_provider(external_data_provider=dprov) # Register the ExternalDataset dproducer_id = self.dams_client.register_external_data_set(external_dataset_id=ds_id) ## Create the dataset agent datasetagent_obj = IonObject(RT.ExternalDatasetAgent, name='ExternalDatasetAgent1', description='external data agent', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS ) self.datasetagent_id = self.dams_client.create_external_dataset_agent(external_dataset_agent=datasetagent_obj, external_dataset_model_id=dataset_model_id) # Generate the data product and associate it to the ExternalDataset pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict') streamdef_id = self.pubsub_client.create_stream_definition(name="temp", parameter_dictionary_id=pdict.identifier) tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dprod = IonObject(RT.DataProduct, name='slocum_parsed_product', description='parsed slocum product', temporal_domain = tdom, spatial_domain = sdom) self.dproduct_id = self.dataproductclient.create_data_product(data_product=dprod, stream_definition_id=streamdef_id) self.dams_client.assign_data_product(input_resource_id=ds_id, data_product_id=self.dproduct_id) #save the incoming slocum data self.dataproductclient.activate_data_product_persistence(self.dproduct_id) self.addCleanup(self.dataproductclient.suspend_data_product_persistence, self.dproduct_id) stream_ids, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True) stream_id = stream_ids[0] dataset_id, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasDataset, object_type=RT.Dataset, id_only=True) self.dataset_id = dataset_id[0] pid = self.create_logger('slocum_parsed_product', stream_id ) self.loggerpids.append(pid) self.DVR_CONFIG['dh_cfg'] = { 'TESTING':True, 'stream_id':stream_id, 'param_dictionary':pdict.dump(), 'data_producer_id':dproducer_id, #CBM: Should this be put in the main body of the config - with mod & cls? 'max_records':20, } # Create the logger for receiving publications #self.create_stream_and_logger(name='slocum',stream_id=stream_id) # Create agent config. self.EDA_RESOURCE_ID = ds_id self.EDA_NAME = ds_name
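# _setup_resources above wires a fixed chain: ExternalDatasetModel -> ExternalDataset ->
# registered data producer -> ExternalDatasetAgent, plus a DataProduct whose stream id and
# parameter dictionary are fed back into the handler config. A sketch of just that handler
# config shape, with placeholder values; the literal ids below are illustrative, not taken
# from an actual test run.
EXAMPLE_DVR_CONFIG = {
    'dvr_mod': 'ion.agents.data.handlers.slocum_data_handler',
    'dvr_cls': 'SlocumDataHandler',
    'dh_cfg': {
        'TESTING': True,                           # handler reads canned test data
        'stream_id': 'stream-id-placeholder',      # stream the handler publishes to
        'param_dictionary': {},                    # dumped ParameterDictionary for the stream
        'data_producer_id': 'producer-id-placeholder',  # producer registered with DAMS
        'max_records': 20,                         # granules batch at most 20 records
    },
}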
class BulkIngestBase(object): """ Awkward, non-obvious base test class: subclasses implement the data-specific methods, and this class parses a sample file and asserts the data was read. test_data_ingest: creates resources and calls... start_agent: starts the agent and then calls... start_listener: starts listeners for data, including one that, when a granule is received, calls... get_retrieve_client: asserts that the callback received some data. See the replacement TestPreloadThenLoadDataset. A little more declarative and straightforward, but much slower (requires preload). """ def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.pubsub_management = PubsubManagementServiceClient() self.dataset_management = DatasetManagementServiceClient() self.data_product_management = DataProductManagementServiceClient() self.data_acquisition_management = DataAcquisitionManagementServiceClient() self.data_retriever = DataRetrieverServiceClient() self.process_dispatch_client = ProcessDispatcherServiceClient(node=self.container.node) self.resource_registry = self.container.resource_registry self.context_ids = self.build_param_contexts() self.setup_resources() def build_param_contexts(self): raise NotImplementedError('build_param_contexts must be implemented in child classes') def create_external_dataset(self): raise NotImplementedError('create_external_dataset must be implemented in child classes') def get_dvr_config(self): raise NotImplementedError('get_dvr_config must be implemented in child classes') def get_retrieve_client(self, dataset_id=''): raise NotImplementedError('get_retrieve_client must be implemented in child classes') def test_data_ingest(self): self.pdict_id = self.create_parameter_dict(self.name) self.stream_def_id = self.create_stream_def(self.name, self.pdict_id) self.data_product_id = self.create_data_product(self.name, self.description, self.stream_def_id) self.dataset_id = self.get_dataset_id(self.data_product_id) self.stream_id, self.route = self.get_stream_id_and_route(self.data_product_id) self.external_dataset_id = self.create_external_dataset() self.data_producer_id = self.register_external_dataset(self.external_dataset_id) self.start_agent() def create_parameter_dict(self, name=''): return self.dataset_management.create_parameter_dictionary(name=name, parameter_context_ids=self.context_ids, temporal_context='time') def create_stream_def(self, name='', pdict_id=''): return self.pubsub_management.create_stream_definition(name=name, parameter_dictionary_id=pdict_id) def create_data_product(self, name='', description='', stream_def_id=''): tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dp_obj = DataProduct( name=name, description=description, processing_level_code='Parsed_Canonical', temporal_domain=tdom, spatial_domain=sdom) data_product_id = self.data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id) self.data_product_management.activate_data_product_persistence(data_product_id) self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id) return data_product_id def register_external_dataset(self, external_dataset_id=''): return self.data_acquisition_management.register_external_data_set(external_dataset_id=external_dataset_id) def get_dataset_id(self, data_product_id=''): dataset_ids, assocs = self.resource_registry.find_objects(subject=data_product_id, predicate='hasDataset', id_only=True) return dataset_ids[0] def get_stream_id_and_route(self,
data_product_id): stream_ids, _ = self.resource_registry.find_objects(data_product_id, PRED.hasStream, RT.Stream, id_only=True) stream_id = stream_ids[0] route = self.pubsub_management.read_stream_route(stream_id) #self.create_logger(self.name, stream_id) return stream_id, route def start_agent(self): agent_config = { 'driver_config': self.get_dvr_config(), 'stream_config': {}, 'agent': {'resource_id': self.external_dataset_id}, 'test_mode': True } self._ia_pid = self.container.spawn_process( name=self.EDA_NAME, module=self.EDA_MOD, cls=self.EDA_CLS, config=agent_config) self._ia_client = ResourceAgentClient(self.external_dataset_id, process=FakeProcess()) cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) self._ia_client.execute_agent(cmd) cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) self._ia_client.execute_agent(cmd) cmd = AgentCommand(command=ResourceAgentEvent.RUN) self._ia_client.execute_agent(cmd) cmd = AgentCommand(command=DriverEvent.START_AUTOSAMPLE) self._ia_client.execute_resource(command=cmd) self.start_listener(self.dataset_id) def stop_agent(self): cmd = AgentCommand(command=DriverEvent.STOP_AUTOSAMPLE) self._ia_client.execute_resource(cmd) cmd = AgentCommand(command=ResourceAgentEvent.RESET) self._ia_client.execute_agent(cmd) self.container.terminate_process(self._ia_pid) def start_listener(self, dataset_id=''): dataset_modified = Event() #callback to use retrieve to get data from the coverage def cb(*args, **kwargs): self.get_retrieve_client(dataset_id=dataset_id) #callback to keep execution going once dataset has been fully ingested def cb2(*args, **kwargs): dataset_modified.set() es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id) es.start() es2 = EventSubscriber(event_type=OT.DeviceCommonLifecycleEvent, callback=cb2, origin='BaseDataHandler._acquire_sample') es2.start() self.addCleanup(es.stop) self.addCleanup(es2.stop) #let it go for up to 120 seconds, then stop the agent and reset it dataset_modified.wait(120) self.stop_agent() def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name+'_logger') producer_definition.executable = { 'module':'ion.processes.data.stream_granule_logger', 'class':'StreamGranuleLogger' } logger_procdef_id = self.process_dispatch_client.create_process_definition(process_definition=producer_definition) configuration = { 'process':{ 'stream_id':stream_id, } } pid = self.process_dispatch_client.schedule_process(process_definition_id=logger_procdef_id, configuration=configuration) return pid
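# The minimal subclass sketch promised in the docstring above. Every literal here
# (parameter name, file path, agent module/class) is hypothetical, and
# create_parameter_context is assumed to be available on DatasetManagementServiceClient;
# a real subclass supplies its own parser-specific values.
class ExampleBulkIngestTest(BulkIngestBase, IonIntegrationTestCase):
    name = 'example_dataset'
    description = 'bulk ingest sketch'
    EDA_NAME = 'ExampleDatasetAgent'            # hypothetical process name
    EDA_MOD = 'ion.agents.data.dataset_agent'   # hypothetical module path
    EDA_CLS = 'DataSetAgent'                    # hypothetical class name

    def build_param_contexts(self):
        # one context id per parameter; mirrors the ParameterContext usage later in this file
        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('float64')))
        t_ctxt.uom = 'seconds since 01-01-1900'
        ctxt_id = self.dataset_management.create_parameter_context(name='time', parameter_context=t_ctxt.dump())
        return [ctxt_id]

    def create_external_dataset(self):
        dset = ExternalDataset(name=self.name, dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(), contact=ContactInformation())
        dset.dataset_description.parameters['base_url'] = 'test_data/example/'  # hypothetical path
        return self.data_acquisition_management.create_external_dataset(external_dataset=dset)

    def get_dvr_config(self):
        return {'dvr_mod': self.EDA_MOD, 'dvr_cls': self.EDA_CLS,
                'dh_cfg': {'TESTING': True, 'max_records': 20}}

    def get_retrieve_client(self, dataset_id=''):
        # assert that the listener callback actually saw ingested data
        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assertTrue(len(rdt['time']) > 0)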
class TestIntExternalObservatoryAgentService(IonIntegrationTestCase): def setUp(self): self._start_container() self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml') # self.eoas_cli = ExternalObservatoryAgentServiceClient() # self.rr_cli = ResourceRegistryServiceClient() self.dams_cli = DataAcquisitionManagementServiceClient() self.dpms_cli = DataProductManagementServiceClient() self._setup_ncom() self._setup_hfr() # eoas_proc = self.container.proc_manager.procs_by_name['external_data_agent_management'] # log.debug("Got EOAS Process: %s" % eoas_proc) self._ncom_agt_cli = ResourceAgentClient( resource_id=self.ncom_ds_id, name='external_observatory_agent', process=FakeProcess()) log.debug("Got a ResourceAgentClient: res_id=%s" % self._ncom_agt_cli.resource_id) self._hfr_agt_cli = ResourceAgentClient( resource_id=self.hfr_ds_id, name='external_observatory_agent', process=FakeProcess()) log.debug("Got a ResourceAgentClient: res_id=%s" % self._hfr_agt_cli.resource_id) def _setup_ncom(self): # TODO: some or all of this (or some variation) should move to DAMS # Create and register the necessary resources/objects eda = ExternalDatasetAgent() eda_id = self.dams_cli.create_external_dataset_agent(eda) eda_inst = ExternalDatasetAgentInstance() eda_inst_id = self.dams_cli.create_external_dataset_agent_instance( eda_inst, external_dataset_agent_id=eda_id) # Create DataProvider dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation()) # dprov.institution.name = "OOI CGSN" dprov.contact.name = "Robert Weller" dprov.contact.email = "*****@*****.**" # Create DataSource dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation()) # dsrc.connection_params["base_data_url"] = "http://ooi.whoi.edu/thredds/dodsC/" dsrc.connection_params["base_data_url"] = "" dsrc.contact.name = "Rich Signell" dsrc.contact.email = "*****@*****.**" # Create ExternalDataset dset = ExternalDataset(name="test", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation()) # dset.dataset_description.parameters["dataset_path"] = "ooi/AS02CPSM_R_M.nc" dset.dataset_description.parameters[ "dataset_path"] = "test_data/ncom.nc" dset.dataset_description.parameters["temporal_dimension"] = "time" dset.dataset_description.parameters["zonal_dimension"] = "lon" dset.dataset_description.parameters["meridional_dimension"] = "lat" # Create DataSourceModel dsrc_model = DataSourceModel(name="dap_model") dsrc_model.model = "DAP" dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler" dsrc_model.data_handler_class = "DapExternalDataHandler" ## Run everything through DAMS ds_id = self.ncom_ds_id = self.dams_cli.create_external_dataset( external_dataset=dset) ext_dprov_id = self.dams_cli.create_external_data_provider( external_data_provider=dprov) ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc) ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model) # Register the ExternalDataset dproducer_id = self.dams_cli.register_external_data_set( external_dataset_id=ds_id) ## Associate everything # Convenience method # self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=eda_id, agent_instance_id=eda_inst_id) # Or using each method self.dams_cli.assign_data_source_to_external_data_provider( data_source_id=ext_dsrc_id, 
external_data_provider_id=ext_dprov_id) self.dams_cli.assign_data_source_to_data_model( data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id) self.dams_cli.assign_external_dataset_to_data_source( external_dataset_id=ds_id, data_source_id=ext_dsrc_id) self.dams_cli.assign_external_dataset_to_agent_instance( external_dataset_id=ds_id, agent_instance_id=eda_inst_id) # self.dams_cli.assign_external_dataset_agent_to_data_model(external_data_agent_id=eda_id, data_source_model_id=ext_dsrc_model_id) # self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=eda_id, agent_instance_id=eda_inst_id) # Generate the data product and associate it to the ExternalDataset dprod = DataProduct(name='ncom_product', description='raw ncom product') dproduct_id = self.dpms_cli.create_data_product(data_product=dprod) self.dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True) def _setup_hfr(self): # TODO: some or all of this (or some variation) should move to DAMS # Create and register the necessary resources/objects eda = ExternalDatasetAgent() eda_id = self.dams_cli.create_external_dataset_agent(eda) eda_inst = ExternalDatasetAgentInstance() eda_inst_id = self.dams_cli.create_external_dataset_agent_instance( eda_inst, external_dataset_agent_id=eda_id) # Create DataProvider dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation()) # dprov.institution.name = "HFR UCSD" # Create DataSource dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation()) dsrc.connection_params[ "base_data_url"] = "http://hfrnet.ucsd.edu:8080/thredds/dodsC/" # Create ExternalDataset dset = ExternalDataset(name="UCSD HFR", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation()) dset.dataset_description.parameters[ "dataset_path"] = "HFRNet/USEGC/6km/hourly/RTV" # dset.dataset_description.parameters["dataset_path"] = "test_data/hfr.nc" dset.dataset_description.parameters["temporal_dimension"] = "time" dset.dataset_description.parameters["zonal_dimension"] = "lon" dset.dataset_description.parameters["meridional_dimension"] = "lat" # Create DataSourceModel dsrc_model = DataSourceModel(name="dap_model") dsrc_model.model = "DAP" dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler" dsrc_model.data_handler_class = "DapExternalDataHandler" ## Run everything through DAMS ds_id = self.hfr_ds_id = self.dams_cli.create_external_dataset( external_dataset=dset) ext_dprov_id = self.dams_cli.create_external_data_provider( external_data_provider=dprov) ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc) ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model) # Register the ExternalDataset dproducer_id = self.dams_cli.register_external_data_set( external_dataset_id=ds_id) ## Associate everything # Convenience method # self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=eda_id, agent_instance_id=eda_inst_id) # Or using each method self.dams_cli.assign_data_source_to_external_data_provider( data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id) self.dams_cli.assign_data_source_to_data_model( data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id) self.dams_cli.assign_external_dataset_to_data_source( external_dataset_id=ds_id, 
data_source_id=ext_dsrc_id) self.dams_cli.assign_external_dataset_to_agent_instance( external_dataset_id=ds_id, agent_instance_id=eda_inst_id) # self.dams_cli.assign_external_dataset_agent_to_data_model(external_data_agent_id=eda_id, data_source_model_id=ext_dsrc_model_id) # self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=eda_id, agent_instance_id=eda_inst_id) # Generate the data product and associate it to the ExternalDataset dprod = DataProduct(name='hfr_product', description='raw hfr product') dproduct_id = self.dpms_cli.create_data_product(data_product=dprod) self.dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True) ########## Tests ########## @unittest.skip("Currently broken due to resource/agent refactorings") def test_get_capabilities(self): # Get all the capabilities caps = self._ncom_agt_cli.get_capabilities() log.debug("all capabilities: %s" % caps) lst = [['RES_CMD', 'acquire_data'], ['RES_CMD', 'acquire_data_by_request'], ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'], ['RES_CMD', 'compare'], ['RES_CMD', 'get_attributes'], ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'], ['RES_CMD', 'has_new_data']] self.assertEquals(caps, lst) caps = self._ncom_agt_cli.get_capabilities( capability_types=['RES_CMD']) log.debug("resource commands: %s" % caps) lst = [['RES_CMD', 'acquire_data'], ['RES_CMD', 'acquire_data_by_request'], ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'], ['RES_CMD', 'compare'], ['RES_CMD', 'get_attributes'], ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'], ['RES_CMD', 'has_new_data']] self.assertEquals(caps, lst) caps = self._ncom_agt_cli.get_capabilities( capability_types=['RES_PAR']) log.debug("resource commands: %s" % caps) self.assertEqual(type(caps), list) caps = self._ncom_agt_cli.get_capabilities( capability_types=['AGT_CMD']) log.debug("resource commands: %s" % caps) self.assertEqual(type(caps), list) caps = self._ncom_agt_cli.get_capabilities( capability_types=['AGT_PAR']) log.debug("resource commands: %s" % caps) self.assertEqual(type(caps), list) @unittest.skip("Currently broken due to resource/agent refactorings") def test_execute_get_attrs(self): cmd = AgentCommand(command_id="111", command="get_attributes") log.debug("Execute AgentCommand: %s" % cmd) ret = self._ncom_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) self.assertIsInstance(ret.result, dict) @unittest.skip("Currently broken due to resource/agent refactorings") def test_execute_get_fingerprint(self): cmd = AgentCommand(command_id="111", command="get_fingerprint") log.debug("Execute AgentCommand: %s" % cmd) ret = self._ncom_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) self.assertIsInstance(ret.result, dict) @unittest.skip("Currently broken due to resource/agent refactorings") def test_execute_single_worker(self): cmd = AgentCommand(command_id="111", command="get_attributes") log.debug("Execute AgentCommand: %s" % cmd) ret = self._ncom_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) self.assertIsInstance(ret.result, dict) cmd = AgentCommand(command_id="112", command="get_fingerprint") log.debug("Execute AgentCommand: %s" % cmd) ret = self._ncom_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) self.assertIsInstance(ret.result, dict) @unittest.skip("Currently broken due to resource/agent refactorings")
def test_execute_multi_worker(self): cmd = AgentCommand(command_id="111", command="has_new_data") log.debug("Execute AgentCommand: %s" % cmd) ret = self._ncom_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) self.assertIsInstance(ret.result, dict) cmd = AgentCommand(command_id="111", command="has_new_data") log.debug("Execute AgentCommand: %s" % cmd) ret = self._hfr_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) self.assertIsInstance(ret.result, dict) cmd = AgentCommand(command_id="112", command="get_fingerprint") log.debug("Execute AgentCommand: %s" % cmd) ret = self._ncom_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) self.assertIsInstance(ret.result, dict) cmd = AgentCommand(command_id="112", command="get_fingerprint") log.debug("Execute AgentCommand: %s" % cmd) ret = self._hfr_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) self.assertIsInstance(ret.result, dict) @unittest.skip("Currently broken due to resource/agent refactorings") def test_execute_acquire_data(self): cmd = AgentCommand(command_id="113", command="acquire_data") log.debug("Execute AgentCommand: %s" % cmd) ret = self._ncom_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) @unittest.skip("Currently broken due to resource/agent refactorings") def test_execute_acquire_new_data(self): cmd = AgentCommand(command_id="113", command="acquire_new_data") log.debug("Execute AgentCommand: %s" % cmd) ret = self._ncom_agt_cli.execute(cmd) log.debug("Returned: %s" % ret) self.assertEquals(ret.status, 0) @unittest.skip("Underlying method not yet implemented") def test_set_param(self): res_id = self.ncom_ds_id log.debug("test_get_worker with res_id: %s" % res_id) res = self.eoas_cli.get_worker(res_id) with self.assertRaises(IonException): self.eoas_cli.set_param(resource_id=res_id, name="param", value="value") @unittest.skip("Underlying method not yet implemented") def test_get_param(self): res_id = self.ncom_ds_id log.debug("test_get_worker with res_id: %s" % res_id) res = self.eoas_cli.get_worker(res_id) with self.assertRaises(IonException): self.eoas_cli.get_param(resource_id=res_id, name="param") @unittest.skip("Underlying method not yet implemented") def test_execute_agent(self): res_id = self.ncom_ds_id log.debug("test_get_worker with res_id: %s" % res_id) res = self.eoas_cli.get_worker(res_id) with self.assertRaises(IonException): self.eoas_cli.execute_agent(resource_id=res_id) @unittest.skip("Underlying method not yet implemented") def test_set_agent_param(self): res_id = self.ncom_ds_id log.debug("test_get_worker with res_id: %s" % res_id) res = self.eoas_cli.get_worker(res_id) with self.assertRaises(IonException): self.eoas_cli.set_agent_param(resource_id=res_id, name="param", value="value") @unittest.skip("Underlying method not yet implemented") def test_get_agent_param(self): res_id = self.ncom_ds_id log.debug("test_get_worker with res_id: %s" % res_id) res = self.eoas_cli.get_worker(res_id) with self.assertRaises(IonException): self.eoas_cli.get_agent_param(resource_id=res_id, name="param")
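# Sketch: the skipped tests above all repeat one execute-and-check pattern. A helper of
# this shape on the test class (hypothetical, not in the original suite) would capture it,
# using only calls the tests already make.
def _execute_ok(self, client, command, command_id='0'):
    cmd = AgentCommand(command_id=command_id, command=command)
    log.debug("Execute AgentCommand: %s" % cmd)
    ret = client.execute(cmd)
    log.debug("Returned: %s" % ret)
    self.assertEquals(ret.status, 0)
    return ret.result
# e.g. result = self._execute_ok(self._ncom_agt_cli, "get_attributes", command_id="111")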
class TestDriverEgg(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubcli = PubsubManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dpclient = DataProductManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataretrieverclient = DataRetrieverServiceClient( node=self.container.node) self.dataset_management = DatasetManagementServiceClient() #setup listerner vars self._data_greenlets = [] self._no_samples = None self._samples_received = [] self.event_publisher = EventPublisher() def get_datastore(self, dataset_id): dataset = self.datasetclient.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore( datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def get_streamConfigs(self): raw_config = StreamConfiguration( stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict') parsed_config = StreamConfiguration( stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict') return raw_config, parsed_config ########################## # # The following tests generate different agent configs and pass them to a common base test script # ########################### @unittest.skip( "this test can't be run from coi services. 
it is missing dependencies") def test_driverLaunchModuleNoURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver", stream_configurations=[raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj) def test_driverLaunchModuleWithURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver", driver_uri=DRV_URI_GOOD, stream_configurations=[raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj) def test_driverLaunchNoModuleOnlyURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", #driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", #driver_class="SBE37Driver", driver_uri=DRV_URI_GOOD, stream_configurations=[raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj) def test_driverLaunchBogusModuleWithURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="bogus", driver_class="Bogus", driver_uri=DRV_URI_GOOD, stream_configurations=[raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj) @unittest.skip( "Launches an egg 'process' even though the egg download should produce error 404" ) def test_driverLaunchNoModule404URI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", #driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", #driver_class="SBE37Driver", driver_uri=DRV_URI_404, stream_configurations=[raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj, False) def test_driverLaunchNoModuleBadEggURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", #driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", #driver_class="SBE37Driver", driver_uri=DRV_URI_BAD, stream_configurations=[raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj, True, False) def base_activateInstrumentSample(self, instAgent_obj, expect_launch=True, expect_command=True): """ This method runs a test of launching a driver with a given agent configuration """ # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") instModel_id = self.imsclient.create_instrument_model(instModel_obj) print 'new InstrumentModel id = %s ' % instModel_id # Create InstrumentAgent instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) print 'new InstrumentAgent id = %s' % instAgent_id self.imsclient.assign_instrument_model_to_instrument_agent( instModel_id, instAgent_id) # Create InstrumentDevice instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345") instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) port_agent_config = { 'device_addr': 
CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config=port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, instDevice_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_raw_param_dict', id_only=True) parsed_stream_def_id = self.pubsubcli.create_stream_definition( name='parsed', parameter_dictionary_id=parsed_pdict_id) raw_stream_def_id = self.pubsubcli.create_stream_definition( name='raw', parameter_dictionary_id=raw_pdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain=tdom, spatial_domain=sdom) data_product_id1 = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id) print 'new dp_id = %s' % data_product_id1 self.dpclient.activate_data_product_persistence( data_product_id=data_product_id1) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True) print 'Data product streams1 = %s' % stream_ids # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True) print 'Data set for data_product_id1 = %s' % dataset_ids[0] self.parsed_dataset = dataset_ids[0] #create the datastore at the beginning of each int test that persists data self.get_datastore(self.parsed_dataset) dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test', temporal_domain=tdom, spatial_domain=sdom) data_product_id2 = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=raw_stream_def_id) print 'new dp_id = %s' % str(data_product_id2) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2) self.dpclient.activate_data_product_persistence( data_product_id=data_product_id2) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True) print 'Data product streams2 = %s' % str(stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasDataset, RT.Dataset, True) print 'Data set for data_product_id2 = %s' % dataset_ids[0] self.raw_dataset = dataset_ids[0] # add start/stop for instrument agent gevent.joinall([ gevent.spawn( lambda: self.imsclient.start_instrument_agent_instance( instrument_agent_instance_id=instAgentInstance_id)) ]) self.addCleanup(self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=instAgentInstance_id) #wait 
for start inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance( instAgentInstance_id) agent_process_id = ResourceAgentClient._get_agent_process_id( instDevice_id) print "Agent process id is '%s'" % str(agent_process_id) self.assertTrue(agent_process_id) gate = ProcessStateGate(self.processdispatchclient.read_process, agent_process_id, ProcessStateEnum.RUNNING) if not expect_launch: self.assertFalse( gate.await(30), "The instance (%s) of bogus instrument agent spawned in 30 seconds ?!?" % agent_process_id) return self.assertTrue( gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" % agent_process_id) print "Instrument Agent Instance successfully triggered ProcessStateGate as RUNNING" #print 'Instrument agent instance obj: = %s' % str(inst_agent_instance_obj) # Start a resource agent client to talk with the instrument agent. self._ia_client = ResourceAgentClient(instDevice_id, to_name=agent_process_id, process=FakeProcess()) print "ResourceAgentClient created: %s" % str(self._ia_client) print "Sending command=ResourceAgentEvent.INITIALIZE" cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) if not expect_command: self.assertRaises(ServerError, self._ia_client.execute_agent, cmd) return retval = self._ia_client.execute_agent(cmd) print "Result of INITIALIZE: %s" % str(retval) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.INACTIVE) cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) reply = self._ia_client.execute_agent(cmd) self.assertTrue(reply.status == 0) cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE) retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, 'DRIVER_STATE_COMMAND') cmd = AgentCommand(command=ResourceAgentEvent.RUN) reply = self._ia_client.execute_agent(cmd) self.assertTrue(reply.status == 0) cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE) retval = self._ia_client.execute_resource(cmd) # This gevent sleep is there to test the autosample time, which will show something different from default # only if the instrument runs for over a minute gevent.sleep(90) extended_instrument = self.imsclient.get_instrument_device_extension( instrument_device_id=instDevice_id) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) autosample_string = extended_instrument.computed.uptime.value autosampling_time = int(autosample_string.split()[4]) self.assertTrue(autosampling_time > 0) cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE) retval = self._ia_client.execute_resource(cmd) print "Sending command=ResourceAgentEvent.RESET" cmd = AgentCommand(command=ResourceAgentEvent.RESET) reply = self._ia_client.execute_agent(cmd) print "Result of RESET: %s" % str(reply)
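# Sketch: the command sequence base_activateInstrumentSample drives, condensed into one
# helper (not in the original test). Per the assertions above, the agent sits in INACTIVE
# after INITIALIZE; GO_ACTIVE and RUN then bring it to where START_AUTOSAMPLE can be
# issued as a resource command.
def drive_agent_to_autosample(ia_client):
    for event in (ResourceAgentEvent.INITIALIZE,
                  ResourceAgentEvent.GO_ACTIVE,
                  ResourceAgentEvent.RUN):
        ia_client.execute_agent(AgentCommand(command=event))
    ia_client.execute_resource(AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE))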
class TestDataProcessManagementPrime(IonIntegrationTestCase): def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.dataset_management = DatasetManagementServiceClient() self.resource_registry = self.container.resource_registry self.pubsub_management = PubsubManagementServiceClient() self.data_process_management = DataProcessManagementServiceClient() self.data_product_management = DataProductManagementServiceClient() self.validators = 0 def lc_preload(self): config = DotDict() config.op = 'load' config.scenario = 'BASE,LC_TEST' config.categories = 'ParameterFunctions,ParameterDefs,ParameterDictionary' config.path = 'res/preload/r2_ioc' self.container.spawn_process('preload','ion.processes.bootstrap.ion_loader','IONLoader', config) def ctd_plain_input_data_product(self): available_fields = [ 'internal_timestamp', 'temp', 'preferred_timestamp', 'time', 'port_timestamp', 'quality_flag', 'lat', 'conductivity', 'driver_timestamp', 'lon', 'pressure'] return self.make_data_product('ctd_parsed_param_dict', 'ctd plain test', available_fields) def ctd_plain_salinity(self): available_fields = [ 'internal_timestamp', 'preferred_timestamp', 'time', 'port_timestamp', 'quality_flag', 'lat', 'driver_timestamp', 'lon', 'salinity'] return self.make_data_product('ctd_parsed_param_dict', 'salinity', available_fields) def ctd_plain_density(self): available_fields = [ 'internal_timestamp', 'preferred_timestamp', 'time', 'port_timestamp', 'quality_flag', 'lat', 'driver_timestamp', 'lon', 'density'] return self.make_data_product('ctd_parsed_param_dict', 'density', available_fields) def ctd_instrument_data_product(self): available_fields = [ 'internal_timestamp', 'temp', 'preferred_timestamp', 'time', 'port_timestamp', 'quality_flag', 'lat', 'conductivity', 'driver_timestamp', 'lon', 'pressure'] return self.make_data_product('ctd_LC_TEST', 'ctd instrument', available_fields) def make_data_product(self, pdict_name, dp_name, available_fields=[]): pdict_id = self.dataset_management.read_parameter_dictionary_by_name(pdict_name, id_only=True) stream_def_id = self.pubsub_management.create_stream_definition('%s stream_def' % dp_name, parameter_dictionary_id=pdict_id, available_fields=available_fields or None) self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id) tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dp_obj = DataProduct(name=dp_name) dp_obj.temporal_domain = tdom dp_obj.spatial_domain = sdom data_product_id = self.data_product_management.create_data_product(dp_obj, stream_definition_id=stream_def_id) self.addCleanup(self.data_product_management.delete_data_product, data_product_id) return data_product_id def google_dt_data_product(self): return self.make_data_product('google_dt', 'visual') def ctd_derived_data_product(self): return self.make_data_product('ctd_LC_TEST', 'ctd derived products') def publish_to_plain_data_product(self, data_product_id): stream_ids, _ = self.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasStream, id_only=True) self.assertTrue(len(stream_ids)) stream_id = stream_ids.pop() route = self.pubsub_management.read_stream_route(stream_id) stream_definition = self.pubsub_management.read_stream_definition(stream_id=stream_id) stream_def_id = stream_definition._id publisher = StandaloneStreamPublisher(stream_id, route) rdt = RecordDictionaryTool(stream_definition_id=stream_def_id) now = time.time() ntp_now = now + 2208988800 # Do not use in production, this is a loose 
translation rdt['internal_timestamp'] = [ntp_now] rdt['temp'] = [20.0] rdt['preferred_timestamp'] = ['driver_timestamp'] rdt['time'] = [ntp_now] rdt['port_timestamp'] = [ntp_now] rdt['quality_flag'] = [None] rdt['lat'] = [45] rdt['conductivity'] = [4.2914] rdt['driver_timestamp'] = [ntp_now] rdt['lon'] = [-71] rdt['pressure'] = [3.068] granule = rdt.to_granule() publisher.publish(granule) def publish_to_data_product(self, data_product_id): stream_ids, _ = self.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasStream, id_only=True) self.assertTrue(len(stream_ids)) stream_id = stream_ids.pop() route = self.pubsub_management.read_stream_route(stream_id) stream_definition = self.pubsub_management.read_stream_definition(stream_id=stream_id) stream_def_id = stream_definition._id publisher = StandaloneStreamPublisher(stream_id, route) rdt = RecordDictionaryTool(stream_definition_id=stream_def_id) now = time.time() ntp_now = now + 2208988800 # Do not use in production, this is a loose translation rdt['internal_timestamp'] = [ntp_now] rdt['temp'] = [300000] rdt['preferred_timestamp'] = ['driver_timestamp'] rdt['time'] = [ntp_now] rdt['port_timestamp'] = [ntp_now] rdt['quality_flag'] = [None] rdt['lat'] = [45] rdt['conductivity'] = [4341400] rdt['driver_timestamp'] = [ntp_now] rdt['lon'] = [-71] rdt['pressure'] = [256.8] granule = rdt.to_granule() publisher.publish(granule) def setup_subscriber(self, data_product_id, callback): stream_ids, _ = self.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasStream, id_only=True) self.assertTrue(len(stream_ids)) stream_id = stream_ids.pop() sub_id = self.pubsub_management.create_subscription('validator_%s'%self.validators, stream_ids=[stream_id]) self.addCleanup(self.pubsub_management.delete_subscription, sub_id) self.pubsub_management.activate_subscription(sub_id) self.addCleanup(self.pubsub_management.deactivate_subscription, sub_id) subscriber = StandaloneStreamSubscriber('validator_%s' % self.validators, callback=callback) subscriber.start() self.addCleanup(subscriber.stop) self.validators+=1 return subscriber def create_density_transform_function(self): tf = TransformFunction(name='ctdbp_l2_density', module='ion.processes.data.transforms.ctdbp.ctdbp_L2_density', cls='CTDBP_DensityTransformAlgorithm') tf_id = self.data_process_management.create_transform_function(tf) self.addCleanup(self.data_process_management.delete_transform_function, tf_id) return tf_id def create_salinity_transform_function(self): tf = TransformFunction(name='ctdbp_l2_salinity', module='ion.processes.data.transforms.ctdbp.ctdbp_L2_salinity', cls='CTDBP_SalinityTransformAlgorithm') tf_id = self.data_process_management.create_transform_function(tf) self.addCleanup(self.data_process_management.delete_transform_function, tf_id) return tf_id @attr('LOCOINT') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') def test_data_process_prime(self): self.lc_preload() instrument_data_product_id = self.ctd_instrument_data_product() derived_data_product_id = self.ctd_derived_data_product() data_process_id = self.data_process_management.create_data_process2(in_data_product_ids=[instrument_data_product_id], out_data_product_ids=[derived_data_product_id]) self.addCleanup(self.data_process_management.delete_data_process2, data_process_id) self.data_process_management.activate_data_process2(data_process_id) self.addCleanup(self.data_process_management.deactivate_data_process2, data_process_id) validated = Event() def 
validation(msg, route, stream_id): rdt = RecordDictionaryTool.load_from_granule(msg) np.testing.assert_array_almost_equal(rdt['conductivity_L1'], np.array([42.914])) np.testing.assert_array_almost_equal(rdt['temp_L1'], np.array([20.])) np.testing.assert_array_almost_equal(rdt['pressure_L1'], np.array([3.068])) np.testing.assert_array_almost_equal(rdt['density'], np.array([1021.7144739593881])) np.testing.assert_array_almost_equal(rdt['salinity'], np.array([30.935132729668283])) validated.set() self.setup_subscriber(derived_data_product_id, callback=validation) self.publish_to_data_product(instrument_data_product_id) self.assertTrue(validated.wait(10)) def test_older_transform(self): input_data_product_id = self.ctd_plain_input_data_product() conductivity_data_product_id = self.make_data_product('ctd_parsed_param_dict', 'conductivity_product', ['time', 'conductivity']) conductivity_stream_def_id = self.get_named_stream_def('conductivity_product stream_def') temperature_data_product_id = self.make_data_product('ctd_parsed_param_dict', 'temperature_product', ['time', 'temp']) temperature_stream_def_id = self.get_named_stream_def('temperature_product stream_def') pressure_data_product_id = self.make_data_product('ctd_parsed_param_dict', 'pressure_product', ['time', 'pressure']) pressure_stream_def_id = self.get_named_stream_def('pressure_product stream_def') dpd = DataProcessDefinition(name='ctdL0') dpd.data_process_type = DataProcessTypeEnum.TRANSFORM dpd.module = 'ion.processes.data.transforms.ctd.ctd_L0_all' dpd.class_name = 'ctd_L0_all' data_process_definition_id = self.data_process_management.create_data_process_definition(dpd) self.addCleanup(self.data_process_management.delete_data_process_definition, data_process_definition_id) self.data_process_management.assign_stream_definition_to_data_process_definition(conductivity_stream_def_id, data_process_definition_id, binding='conductivity') self.data_process_management.assign_stream_definition_to_data_process_definition(temperature_stream_def_id, data_process_definition_id, binding='temperature') self.data_process_management.assign_stream_definition_to_data_process_definition(pressure_stream_def_id, data_process_definition_id, binding='pressure') data_process_id = self.data_process_management.create_data_process2(data_process_definition_id=data_process_definition_id, in_data_product_ids=[input_data_product_id], out_data_product_ids=[conductivity_data_product_id, temperature_data_product_id, pressure_data_product_id]) self.addCleanup(self.data_process_management.delete_data_process2, data_process_id) self.data_process_management.activate_data_process2(data_process_id) self.addCleanup(self.data_process_management.deactivate_data_process2, data_process_id) conductivity_validated = Event() def validate_conductivity(msg, route, stream_id): rdt = RecordDictionaryTool.load_from_granule(msg) np.testing.assert_array_almost_equal(rdt['conductivity'], np.array([4.2914])) conductivity_validated.set() self.setup_subscriber(conductivity_data_product_id, callback=validate_conductivity) temperature_validated = Event() def validate_temperature(msg, route, stream_id): rdt = RecordDictionaryTool.load_from_granule(msg) np.testing.assert_array_almost_equal(rdt['temp'], np.array([20.0])) temperature_validated.set() self.setup_subscriber(temperature_data_product_id, callback=validate_temperature) pressure_validated = Event() def validate_pressure(msg, route, stream_id): rdt = RecordDictionaryTool.load_from_granule(msg) 
np.testing.assert_array_almost_equal(rdt['pressure'], np.array([3.068])) pressure_validated.set() self.setup_subscriber(pressure_data_product_id, callback=validate_pressure) self.publish_to_plain_data_product(input_data_product_id) self.assertTrue(conductivity_validated.wait(10)) self.assertTrue(temperature_validated.wait(10)) self.assertTrue(pressure_validated.wait(10)) def get_named_stream_def(self, name): stream_def_ids, _ = self.resource_registry.find_resources(name=name, restype=RT.StreamDefinition, id_only=True) return stream_def_ids[0] def test_actors(self): input_data_product_id = self.ctd_plain_input_data_product() output_data_product_id = self.ctd_plain_density() actor = self.create_density_transform_function() route = {input_data_product_id: {output_data_product_id: actor}} config = DotDict() config.process.routes = route config.process.params.lat = 45. config.process.params.lon = -71. data_process_id = self.data_process_management.create_data_process2(in_data_product_ids=[input_data_product_id], out_data_product_ids=[output_data_product_id], configuration=config) self.addCleanup(self.data_process_management.delete_data_process2, data_process_id) self.data_process_management.activate_data_process2(data_process_id) self.addCleanup(self.data_process_management.deactivate_data_process2, data_process_id) validated = Event() def validation(msg, route, stream_id): rdt = RecordDictionaryTool.load_from_granule(msg) # The value I use is a double, the value coming back is only a float32 so there's some data loss but it should be precise to the 4th digit np.testing.assert_array_almost_equal(rdt['density'], np.array([1021.6839775385847]), decimal=4) validated.set() self.setup_subscriber(output_data_product_id, callback=validation) self.publish_to_plain_data_product(input_data_product_id) self.assertTrue(validated.wait(10)) def test_multi_in_out(self): input1 = self.ctd_plain_input_data_product() input2 = self.make_data_product('ctd_parsed_param_dict', 'input2') density_dp_id = self.ctd_plain_density() salinity_dp_id = self.ctd_plain_salinity() density_actor = self.create_density_transform_function() salinity_actor = self.create_salinity_transform_function() routes = { input1 : { density_dp_id : density_actor, salinity_dp_id : salinity_actor }, input2 : { density_dp_id : density_actor } } config = DotDict() config.process.routes = routes config.process.params.lat = 45. config.process.params.lon = -71. 
data_process_id = self.data_process_management.create_data_process2(in_data_product_ids=[input1, input2], out_data_product_ids=[density_dp_id, salinity_dp_id], configuration=config) self.addCleanup(self.data_process_management.delete_data_process2, data_process_id) self.data_process_management.activate_data_process2(data_process_id) self.addCleanup(self.data_process_management.deactivate_data_process2, data_process_id) density_validated = Event() salinity_validated = Event() def density_validation(msg, route, stream_id): rdt = RecordDictionaryTool.load_from_granule(msg) np.testing.assert_array_almost_equal(rdt['density'], np.array([1021.6839775385847]), decimal=4) density_validated.set() def salinity_validation(msg, route, stream_id): rdt = RecordDictionaryTool.load_from_granule(msg) np.testing.assert_array_almost_equal(rdt['salinity'], np.array([30.93513240786831]), decimal=4) salinity_validated.set() self.setup_subscriber(density_dp_id, callback=density_validation) self.setup_subscriber(salinity_dp_id, callback=salinity_validation) self.publish_to_plain_data_product(input1) self.assertTrue(density_validated.wait(10)) self.assertTrue(salinity_validated.wait(10)) density_validated.clear() salinity_validated.clear() self.publish_to_plain_data_product(input2) self.assertTrue(density_validated.wait(10)) self.assertFalse(salinity_validated.wait(0.75)) density_validated.clear() salinity_validated.clear() def test_visual_transform(self): input_data_product_id = self.ctd_plain_input_data_product() output_data_product_id = self.google_dt_data_product() dpd = DataProcessDefinition(name='visual transform') dpd.data_process_type = DataProcessTypeEnum.TRANSFORM dpd.module = 'ion.processes.data.transforms.viz.google_dt' dpd.class_name = 'VizTransformGoogleDT' #-------------------------------------------------------------------------------- # Walk before we base jump #-------------------------------------------------------------------------------- data_process_definition_id = self.data_process_management.create_data_process_definition(dpd) self.addCleanup(self.data_process_management.delete_data_process_definition, data_process_definition_id) data_process_id = self.data_process_management.create_data_process2(data_process_definition_id=data_process_definition_id, in_data_product_ids=[input_data_product_id], out_data_product_ids=[output_data_product_id]) self.addCleanup(self.data_process_management.delete_data_process2,data_process_id) self.data_process_management.activate_data_process2(data_process_id) self.addCleanup(self.data_process_management.deactivate_data_process2, data_process_id) validated = Event() def validation(msg, route, stream_id): rdt = RecordDictionaryTool.load_from_granule(msg) self.assertTrue(rdt['google_dt_components'] is not None) validated.set() self.setup_subscriber(output_data_product_id, callback=validation) self.publish_to_plain_data_product(input_data_product_id) self.assertTrue(validated.wait(10))
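# The publishers above shift Unix time by a bare constant to get NTP time. For reference,
# the arithmetic made explicit (sketch, not in the original tests): NTP counts seconds
# from 1900-01-01 and Unix from 1970-01-01; the epochs are 70 years apart, and
# 70*365 + 17 leap days = 25567 days = 2208988800 seconds.
NTP_EPOCH_OFFSET = 2208988800  # seconds between 1900-01-01 and 1970-01-01

def unix_to_ntp(unix_seconds):
    # the same "loose translation" as above: ignores leap seconds and the NTP fractional field
    return unix_seconds + NTP_EPOCH_OFFSET

def ntp_to_unix(ntp_seconds):
    return ntp_seconds - NTP_EPOCH_OFFSET

# e.g. unix_to_ntp(0) == 2208988800, the NTP timestamp of the Unix epoch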
def test_createDataProduct(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' # Establish endpoint with container container_client = ContainerAgentClient(node=self.container.node, name=self.container.name) #print 'got CC client' container_client.start_rel_from_url('res/deploy/r2sa.yml') print 'started services' # Now create client to DataProductManagementService client = DataProductManagementServiceClient(node=self.container.node) # test creating a new data product w/o a data producer print 'Creating new data product w/o a data producer' dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp') try: dp_id = client.create_data_product(dp_obj) except BadRequest as ex: self.fail("failed to create new data product: %s" %ex) print 'new dp_id = ', dp_id # test creating a duplicate data product print 'Creating the same data product a second time (duplicate)' dp_obj.description = 'the first dp' try: dp_id = client.create_data_product(dp_obj) except BadRequest as ex: print ex else: self.fail("duplicate data product was created with the same name") """ # This is broken until the interceptor handles lists properly (w/o converting them to constants) # and DAMS works with pubsub_management.register_producer() correctly # test creating a new data product with a data producer print 'Creating new data product with a data producer' dp_obj = IonObject(RT.DataProduct, name='DP2', description='another new dp') data_producer_obj = IonObject(RT.DataProducer, name='DataProducer1', description='a new data producer') try: dp_id = client.create_data_product(dp_obj, data_producer_obj) except BadRequest as ex: self.fail("failed to create new data product") print 'new dp_id = ', dp_id """ # test reading a non-existent data product print 'reading non-existent data product' try: dp_obj = client.read_data_product('some_fake_id') except NotFound as ex: pass else: self.fail("non-existing data product was found during read: %s" %dp_obj) # update a data product (tests read also) print 'Updating data product' # first get the existing dp object try: dp_obj = client.read_data_product(dp_id) except NotFound as ex: self.fail("existing data product was not found during read") else: pass #print 'dp_obj = ', dp_obj # now tweak the object dp_obj.description = 'the very first dp' # now write the dp back to the registry try: update_result = client.update_data_product(dp_obj) except NotFound as ex: self.fail("existing data product was not found during update") except Conflict as ex: self.fail("revision conflict exception during data product update") else: self.assertTrue(update_result == True) # now get the dp back to see if it was updated try: dp_obj = client.read_data_product(dp_id) except NotFound as ex: self.fail("existing data product was not found during read") else: pass #print 'dp_obj = ', dp_obj self.assertTrue(dp_obj.description == 'the very first dp') # now 'delete' the data product print "deleting data product" try: delete_result = client.delete_data_product(dp_id) except NotFound as ex: self.fail("existing data product was not found during delete") self.assertTrue(delete_result == True) # now try to get the deleted dp object try: dp_obj = client.read_data_product(dp_id) except NotFound as ex: pass else: self.fail("deleted data product was found during read") # now try to delete the already deleted dp object print "deleting non-existing data product" try: delete_result = 
client.delete_data_product(dp_id) except NotFound as ex: pass else: self.fail("non-existing data product was found during delete")
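# Sketch: the try/except/else blocks above predate assertRaises-as-a-context-manager;
# with Python 2.7 unittest (which other tests in this file already use for IonException),
# the delete checks condense to something like this illustrative helper.
def check_delete_semantics(self, client, dp_id):
    client.delete_data_product(dp_id)
    with self.assertRaises(NotFound):
        client.read_data_product(dp_id)      # a deleted product cannot be read
    with self.assertRaises(NotFound):
        client.delete_data_product(dp_id)    # a second delete raises NotFound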
class VisStreamLauncher(ImmediateProcess): """ Emulates a stream source from a NetCDF file, emitting a record of data every few seconds on a stream identified by a routing key. """ def on_init(self): log.debug("VizStreamProducer init. Self.id=%s" % self.id) def on_start(self): log.debug("VizStreamProducer start") self.data_source_name = self.CFG.get('name') self.dataset = self.CFG.get('dataset') # create a pubsub client and a resource registry client self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient( node=self.container.node) # Dummy instrument related clients self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.dpclient = DataProductManagementServiceClient( node=self.container.node) self.IngestClient = IngestionManagementServiceClient( node=self.container.node) # Additional code for creating a dummy instrument """ # Set up the preconditions. Look for an existing ingestion config while True: log.info("VisStreamLauncher:on_start: Waiting for an ingestion configuration to be available.") ingestion_cfgs, _ = self.rrclient.find_resources(RT.IngestionConfiguration, None, None, True) if len(ingestion_cfgs) > 0: break else: gevent.sleep(1) """ # Check to see if the data_product already exists in the system (e.g., relaunching the code after a crash) dp_ids, _ = self.rrclient.find_resources(RT.DataProduct, None, self.data_source_name, True) if len(dp_ids) > 0: data_product_id = dp_ids[0] print '>>>>>>>>>>>>> Found dp_id = ', data_product_id else: # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name=self.data_source_name, description=self.data_source_name, model_label=self.data_source_name) instModel_id = self.imsclient.create_instrument_model( instModel_obj) # Create InstrumentDevice instDevice_obj = IonObject(RT.InstrumentDevice, name=self.data_source_name, description=self.data_source_name, serial_number="12345") instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) # create a stream definition for the data from the ctd simulator ctd_stream_def = SBE37_CDM_stream_definition() ctd_stream_def_id = self.pubsubclient.create_stream_definition( container=ctd_stream_def) print 'Creating new CDM data product with a stream definition' dp_obj = IonObject(RT.DataProduct, name=self.data_source_name, description='ctd stream test') data_product_id = self.dpclient.create_data_product( dp_obj, ctd_stream_def_id) self.damsclient.assign_data_product( input_resource_id=instDevice_id, data_product_id=data_product_id) self.dpclient.activate_data_product_persistence( data_product_id=data_product_id, persist_data=True, persist_metadata=True) print '>>>>>>>>>>>> New dp_id = ', data_product_id # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id, PRED.hasStream, None, True) if self.dataset == 'sinusoidal': pid = self.container.spawn_process( name='ctd_test.' + self.data_source_name, module='ion.processes.data.sinusoidal_stream_publisher', cls='SinusoidalCtdPublisher', config={'process': { 'stream_id': stream_ids[0] }}) else: pid = self.container.spawn_process( name='ctd_test.' 
+ self.data_source_name, module='ion.processes.data.ctd_stream_publisher', cls='SimpleCtdPublisher', config={'process': { 'stream_id': stream_ids[0] }}) def on_quit(self): log.debug("VizStreamProducer quit")
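# Usage sketch (hypothetical module path and values): as an ImmediateProcess, the
# launcher reads 'name' and 'dataset' from its process config, so a manual launch
# would look roughly like:
#   container.spawn_process(
#       name='vis_launcher',
#       module='ion.processes.data.vis_stream_launcher',  # assumed location of VisStreamLauncher
#       cls='VisStreamLauncher',
#       config={'name': 'sinusoidal_stream', 'dataset': 'sinusoidal'})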
class TestDeployment(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.dmpsclient = DataProductManagementServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.psmsclient = PubsubManagementServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.c = DotDict() self.c.resource_registry = self.rrclient self.resource_impl = ResourceImpl(self.c) #@unittest.skip("targeting") def test_create_deployment(self): #create a deployment with metadata and an initial site and device platform_site__obj = IonObject(RT.PlatformSite, name='PlatformSite1', description='test platform site') site_id = self.omsclient.create_platform_site(platform_site__obj) platform_device__obj = IonObject(RT.PlatformDevice, name='PlatformDevice1', description='test platform device') device_id = self.imsclient.create_platform_device(platform_device__obj) start = IonTime(datetime.datetime(2013,1,1)) end = IonTime(datetime.datetime(2014,1,1)) temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string()) deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment', constraint_list=[temporal_bounds]) deployment_id = self.omsclient.create_deployment(deployment_obj) self.omsclient.deploy_platform_site(site_id, deployment_id) self.imsclient.deploy_platform_device(device_id, deployment_id) log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) ) #retrieve the deployment objects and check that the assoc site and device are attached read_deployment_obj = self.omsclient.read_deployment(deployment_id) log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj) ) site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True) self.assertEqual(len(site_ids), 1) device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True) self.assertEqual(len(device_ids), 1) #delete the deployment self.resource_impl.pluck(deployment_id) self.omsclient.force_delete_deployment(deployment_id) # now try to get the deleted dp object try: deployment_obj = self.omsclient.read_deployment(deployment_id) except NotFound: pass else: self.fail("deleted deployment was found during read") #@unittest.skip("targeting") def test_activate_deployment(self): #------------------------------------------------------------------------------------- # Create platform site, platform device, platform model #------------------------------------------------------------------------------------- platform_site__obj = IonObject(RT.PlatformSite, name='PlatformSite1', description='test platform site') site_id = self.omsclient.create_platform_site(platform_site__obj) platform_device_obj = IonObject(RT.PlatformDevice, name='PlatformDevice1', description='test platform device') platform_device_id = self.imsclient.create_platform_device(platform_device_obj) platform_model__obj = IonObject(RT.PlatformModel, name='PlatformModel1', description='test platform model') model_id = 
self.imsclient.create_platform_model(platform_model__obj) #------------------------------------------------------------------------------------- # Assign platform model to platform device and site #------------------------------------------------------------------------------------- self.imsclient.assign_platform_model_to_platform_device(model_id, platform_device_id) self.omsclient.assign_platform_model_to_platform_site(model_id, site_id) #------------------------------------------------------------------------------------- # Create instrument site #------------------------------------------------------------------------------------- instrument_site_obj = IonObject(RT.InstrumentSite, name='InstrumentSite1', description='test instrument site') instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, site_id) pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id) # Construct temporal and spatial Coordinate Reference System objects tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='Log Data Product', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) out_log_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id) #---------------------------------------------------------------------------------------------------- # Start the transform (a logical transform) that acts as an instrument site #---------------------------------------------------------------------------------------------------- self.omsclient.create_site_data_product( site_id= instrument_site_id, data_product_id = out_log_data_product_id) #---------------------------------------------------------------------------------------------------- # Create an instrument device #---------------------------------------------------------------------------------------------------- instrument_device_obj = IonObject(RT.InstrumentDevice, name='InstrumentDevice1', description='test instrument device') instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj) self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) dp_obj = IonObject(RT.DataProduct, name='Instrument Data Product', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) inst_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id) #assign data products appropriately self.damsclient.assign_data_product(input_resource_id=instrument_device_id, data_product_id=inst_data_product_id) #---------------------------------------------------------------------------------------------------- # Create an instrument model #---------------------------------------------------------------------------------------------------- instrument_model_obj = IonObject(RT.InstrumentModel, name='InstrumentModel1', description='test instrument model') instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj) self.imsclient.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id) self.omsclient.assign_instrument_model_to_instrument_site(instrument_model_id, instrument_site_id) #---------------------------------------------------------------------------------------------------- # Create a deployment object 
#---------------------------------------------------------------------------------------------------- start = IonTime(datetime.datetime(2013,1,1)) end = IonTime(datetime.datetime(2014,1,1)) temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string()) deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment', constraint_list=[temporal_bounds]) deployment_id = self.omsclient.create_deployment(deployment_obj) self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id) self.imsclient.deploy_instrument_device(instrument_device_id, deployment_id) log.debug("test_activate_deployment: created deployment id: %s ", str(deployment_id) ) self.omsclient.activate_deployment(deployment_id)
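# A minimal standalone sketch of the 'planned' TemporalBounds pattern the two
# tests above rely on: the window is stored as stringified timestamps and a
# deployment is current when 'now' falls inside it. Plain stdlib stand-ins are
# used here (no IonTime/IonObject), so this illustrates the pattern only, not
# the service API.
import calendar
import datetime

def planned_window(start_dt, end_dt):
    # mirrors TemporalBounds(name='planned', start_datetime=..., end_datetime=...)
    to_epoch = lambda dt: calendar.timegm(dt.timetuple())
    return {'name': 'planned',
            'start_datetime': str(to_epoch(start_dt)),
            'end_datetime': str(to_epoch(end_dt))}

def window_is_current(bounds, now_epoch):
    return int(bounds['start_datetime']) <= now_epoch <= int(bounds['end_datetime'])

bounds = planned_window(datetime.datetime(2013, 1, 1), datetime.datetime(2014, 1, 1))
assert window_is_current(bounds, calendar.timegm(datetime.datetime(2013, 6, 1).timetuple()))
assert not window_is_current(bounds, calendar.timegm(datetime.datetime(2015, 1, 1).timetuple()))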
def _setup_resources(self): # TODO: some or all of this (or some variation) should move to DAMS' # Build the test resources for the dataset dms_cli = DatasetManagementServiceClient() dams_cli = DataAcquisitionManagementServiceClient() dpms_cli = DataProductManagementServiceClient() rr_cli = ResourceRegistryServiceClient() pubsub_cli = PubsubManagementServiceClient() eda = ExternalDatasetAgent(name='example data agent', handler_module=self.DVR_CONFIG['dvr_mod'], handler_class=self.DVR_CONFIG['dvr_cls']) eda_id = dams_cli.create_external_dataset_agent(eda) eda_inst = ExternalDatasetAgentInstance( name='example dataset agent instance') eda_inst_id = dams_cli.create_external_dataset_agent_instance( eda_inst, external_dataset_agent_id=eda_id) # Create and register the necessary resources/objects # Create DataProvider dprov = ExternalDataProvider(name='example data provider', institution=Institution(), contact=ContactInformation()) dprov.contact.individual_names_given = 'Christopher Mueller' dprov.contact.email = '*****@*****.**' # Create DataSource dsrc = DataSource(name='example datasource', protocol_type='FILE', institution=Institution(), contact=ContactInformation()) dsrc.connection_params['base_data_url'] = '' dsrc.contact.individual_names_given = 'Tim Giguere' dsrc.contact.email = '*****@*****.**' # Create ExternalDataset ds_name = 'ruv_test_dataset' dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation()) dset.dataset_description.parameters['base_url'] = 'test_data/ruv/' dset.dataset_description.parameters[ 'list_pattern'] = 'RDLi_SEAB_2011_08_24_1600.ruv' dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H %M' dset.dataset_description.parameters[ 'date_extraction_pattern'] = 'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv' dset.dataset_description.parameters['temporal_dimension'] = None dset.dataset_description.parameters['zonal_dimension'] = None dset.dataset_description.parameters['meridional_dimension'] = None dset.dataset_description.parameters['vertical_dimension'] = None dset.dataset_description.parameters['variables'] = [] # Create DataSourceModel dsrc_model = DataSourceModel(name='ruv_model') #dsrc_model.model = 'RUV' dsrc_model.data_handler_module = 'N/A' dsrc_model.data_handler_class = 'N/A' ## Run everything through DAMS ds_id = dams_cli.create_external_dataset(external_dataset=dset) ext_dprov_id = dams_cli.create_external_data_provider( external_data_provider=dprov) ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc) ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model) # Register the ExternalDataset dproducer_id = dams_cli.register_external_data_set( external_dataset_id=ds_id) # Or using each method dams_cli.assign_data_source_to_external_data_provider( data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id) dams_cli.assign_data_source_to_data_model( data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id) dams_cli.assign_external_dataset_to_data_source( external_dataset_id=ds_id, data_source_id=ext_dsrc_id) dams_cli.assign_external_dataset_to_agent_instance( external_dataset_id=ds_id, agent_instance_id=eda_inst_id) pdict = ParameterDictionary() t_ctxt = ParameterContext( 'data', param_type=QuantityType(value_encoding=numpy.dtype('int64'))) t_ctxt.axis = AxisTypeEnum.TIME t_ctxt.uom = 'seconds since 01-01-1970' pdict.add_context(t_ctxt) #create temp streamdef so the data product can create the stream pc_list = 
[] for pc_k, pc in pdict.iteritems(): pc_list.append(dms_cli.create_parameter_context( pc_k, pc[1].dump())) pdict_id = dms_cli.create_parameter_dictionary('ruv_param_dict', pc_list) streamdef_id = pubsub_cli.create_stream_definition( name="ruv", description="stream def for ruv testing", parameter_dictionary_id=pdict_id) dprod = IonObject(RT.DataProduct, name='ruv_parsed_product', description='parsed ruv product') # Generate the data product and associate it to the ExternalDataset dproduct_id = dpms_cli.create_data_product( data_product=dprod, stream_definition_id=streamdef_id) dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id) stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True) stream_id = stream_id[0] log.info('Created resources: {0}'.format({ 'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id })) #CBM: Eventually, probably want to group this crap somehow - not sure how yet... # Create the logger for receiving publications _, stream_route, _ = self.create_stream_and_logger(name='ruv', stream_id=stream_id) self.EDA_RESOURCE_ID = ds_id self.EDA_NAME = ds_name self.DVR_CONFIG['dh_cfg'] = { 'TESTING': True, 'stream_id': stream_id, 'stream_route': stream_route, 'external_dataset_res': dset, 'param_dictionary': pdict.dump(), 'data_producer_id': dproducer_id, # CBM: Should this be put in the main body of the config - with mod & cls? 'max_records': 20, }
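# Standalone illustration of how a data handler could combine the
# 'date_extraction_pattern' and 'date_pattern' parameters registered above to
# pull a timestamp out of a RUV filename. This is an assumption about the
# handler's behavior, made for illustration; the actual BaseDataHandler logic
# may differ. Pure stdlib, runnable as-is.
import calendar
import re
import time

DATE_EXTRACTION_PATTERN = 'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
DATE_PATTERN = '%Y %m %d %H %M'

def filename_to_epoch(fname):
    # capture (year, month, day, hour, minute), join them so the fields line up
    # with DATE_PATTERN, then convert to seconds since the epoch (UTC)
    groups = re.match(DATE_EXTRACTION_PATTERN, fname).groups()
    return calendar.timegm(time.strptime(' '.join(groups), DATE_PATTERN))

print filename_to_epoch('RDLi_SEAB_2011_08_24_1600.ruv')  # 1314201600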
class TestAgentLaunchOps(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' unittest # suppress a PyCharm inspector error if all unittest.skip references are commented out self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.IDS = IdentityManagementServiceClient(node=self.container.node) self.PSC = PubsubManagementServiceClient(node=self.container.node) self.DP = DataProductManagementServiceClient(node=self.container.node) self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node) self.DSC = DatasetManagementServiceClient(node=self.container.node) self.PDC = ProcessDispatcherServiceClient(node=self.container.node) self.OMS = ObservatoryManagementServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) # @unittest.skip('this test just for debugging setup') # def test_just_the_setup(self): # return def test_get_agent_client_noprocess(self): inst_device_id = self.RR2.create(any_old(RT.InstrumentDevice)) iap = ResourceAgentClient._get_agent_process_id(inst_device_id) # should be no running agent self.assertIsNone(iap) # should raise NotFound self.assertRaises(NotFound, ResourceAgentClient, inst_device_id) def test_resource_state_save_restore(self): # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") instModel_id = self.IMS.create_instrument_model(instModel_obj) log.debug( 'new InstrumentModel id = %s ', instModel_id) # Create InstrumentAgent raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' ) parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict') instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations = [raw_config, parsed_config] ) instAgent_id = self.IMS.create_instrument_agent(instAgent_obj) log.debug( 'new InstrumentAgent id = %s', instAgent_id) self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # Create InstrumentDevice log.debug('test_resource_state_save_restore: Create instrument resource to represent the SBE37 ' + '(SA Req: L4-CI-SA-RQ-241) ') instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" ) instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj) self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id) log.debug("test_resource_state_save_restore: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config = port_agent_config) instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj, 
instAgent_id, instDevice_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id) rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True) raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain = tdom, spatial_domain = sdom) data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug( 'new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1) self.DP.activate_data_product_persistence(data_product_id=data_product_id1) self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True) log.debug( 'Data product streams1 = %s', stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True) log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0]) self.parsed_dataset = dataset_ids[0] #create the datastore at the beginning of each int test that persists data dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test', temporal_domain = tdom, spatial_domain = sdom) data_product_id2 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) log.debug( 'new dp_id = %s', str(data_product_id2)) self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2) self.DP.activate_data_product_persistence(data_product_id=data_product_id2) self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id2) # spin up agent self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) self.addCleanup(self.IMS.stop_instrument_agent_instance, instrument_agent_instance_id=instAgentInstance_id) #wait for start instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id) gate = AgentProcessStateGate(self.PDC.read_process, instDevice_id, ProcessStateEnum.RUNNING) self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" % gate.process_id) # take snapshot of config snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot") snap_obj = self.RR.read_attachment(snap_id, include_content=True) #modify config instance_obj.driver_config["comms_config"] = "BAD_DATA" self.RR.update(instance_obj) #restore config self.IMS.restore_resource_state(instDevice_id, snap_id) instance_obj = self.RR.read(instAgentInstance_id) if "BAD_DATA" == instance_obj.driver_config["comms_config"]: print "Saved config:" print snap_obj.content self.fail("Saved config was not properly restored") self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"]) self.DP.delete_data_product(data_product_id1) self.DP.delete_data_product(data_product_id2) def test_agent_instance_config_hasDevice(self): def assign_fn(child_device_id, 
parent_device_id): self.RR2.create_association(parent_device_id, PRED.hasDevice, child_device_id) def find_fn(parent_device_id): ret, _ = self.RR.find_objects(subject=parent_device_id, predicate=PRED.hasDevice, id_only=True) return ret self.base_agent_instance_config(assign_fn, find_fn) log.info("END test_agent_instance_config_hasDevice") def test_agent_instance_config_hasNetworkParent(self): def assign_fn(child_device_id, parent_device_id): self.RR2.create_association(child_device_id, PRED.hasNetworkParent, parent_device_id) def find_fn(parent_device_id): ret, _ = self.RR.find_subjects(object=parent_device_id, predicate=PRED.hasNetworkParent, id_only=True) return ret self.base_agent_instance_config(assign_fn, find_fn) log.info("END test_agent_instance_config_hasNetworkParent") def base_agent_instance_config(self, assign_child_platform_to_parent_platform_fn, find_child_platform_ids_of_parent_platform_fn): """ Verify that agent configurations are being built properly """ clients = DotDict() clients.resource_registry = self.RR clients.pubsub_management = self.PSC clients.dataset_management = self.DSC config_builder = DotDict() config_builder.i = None config_builder.p = None def refresh_pconfig_builder_hack(config_builder): """ ugly hack to get around "idempotent" RR2 caching remove after https://github.com/ooici/coi-services/pull/1190 """ config_builder.p = PlatformAgentConfigurationBuilder(clients) def refresh_iconfig_builder_hack(config_builder): """ ugly hack to get around "idempotent" RR2 caching remove after https://github.com/ooici/coi-services/pull/1190 """ config_builder.i = InstrumentAgentConfigurationBuilder(clients) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() org_obj = any_old(RT.Org) org_id = self.RR2.create(org_obj) inst_startup_config = {'startup': 'config'} generic_alerts_config = [ {'lvl2': 'lvl3val'} ] required_config_keys = [ 'org_governance_name', 'device_type', 'agent', 'driver_config', 'stream_config', 'startup_config', 'aparam_alerts_config', 'children'] def verify_instrument_config(config, device_id): for key in required_config_keys: self.assertIn(key, config) self.assertEqual(org_obj.org_governance_name, config['org_governance_name']) self.assertEqual(RT.InstrumentDevice, config['device_type']) self.assertIn('driver_config', config) driver_config = config['driver_config'] expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',), } for k, v in expected_driver_fields.iteritems(): self.assertIn(k, driver_config) self.assertEqual(v, driver_config[k]) self.assertEqual({'resource_id': device_id}, config['agent']) self.assertEqual(inst_startup_config, config['startup_config']) self.assertIn('aparam_alerts_config', config) self.assertEqual(generic_alerts_config, config['aparam_alerts_config']) self.assertIn('stream_config', config) for key in ['children']: self.assertEqual({}, config[key]) def verify_child_config(config, device_id, inst_device_id=None): for key in required_config_keys: self.assertIn(key, config) self.assertEqual(org_obj.org_governance_name, config['org_governance_name']) self.assertEqual(RT.PlatformDevice, config['device_type']) self.assertEqual({'resource_id': device_id}, config['agent']) self.assertIn('aparam_alerts_config', config) self.assertEqual(generic_alerts_config, config['aparam_alerts_config']) self.assertIn('stream_config', config) self.assertIn('driver_config', config) self.assertIn('foo', config['driver_config']) self.assertIn('ports', config['driver_config']) self.assertEqual('bar', 
config['driver_config']['foo']) self.assertIn('process_type', config['driver_config']) self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type']) if None is inst_device_id: for key in ['children', 'startup_config']: self.assertEqual({}, config[key]) else: for key in ['startup_config']: self.assertEqual({}, config[key]) self.assertIn(inst_device_id, config['children']) verify_instrument_config(config['children'][inst_device_id], inst_device_id) if config['driver_config']['ports']: self.assertTrue( isinstance(config['driver_config']['ports'], dict) ) def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None): for key in required_config_keys: self.assertIn(key, config) self.assertEqual(org_obj.org_governance_name, config['org_governance_name']) self.assertEqual(RT.PlatformDevice, config['device_type']) self.assertIn('process_type', config['driver_config']) self.assertIn('ports', config['driver_config']) self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type']) self.assertEqual({'resource_id': parent_device_id}, config['agent']) self.assertIn('aparam_alerts_config', config) self.assertEqual(generic_alerts_config, config['aparam_alerts_config']) self.assertIn('stream_config', config) for key in ['startup_config']: self.assertEqual({}, config[key]) if config['driver_config']['ports']: self.assertTrue( isinstance(config['driver_config']['ports'], dict) ) self.assertIn(child_device_id, config['children']) verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id) rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True) raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id) #todo: create org and figure out which agent resource needs to get assigned to it def _make_platform_agent_structure(name='', agent_config=None): if None is agent_config: agent_config = {} # instance creation platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {'driver_config': {'foo': 'bar'}, 'alerts': generic_alerts_config}) platform_agent_instance_obj.agent_config = agent_config platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj) # agent creation raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' ) platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations":[raw_config]}) platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj) # device creation platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice)) # data product creation dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom}) dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id) self.DP.activate_data_product_persistence(data_product_id=dp_id) self.addCleanup(self.DP.suspend_data_product_persistence, dp_id) #deployment creation site_obj = IonObject(RT.PlatformSite, name='sitePlatform') site_id = self.OMS.create_platform_site(platform_site=site_obj) # find current deployment using time constraints current_time = int( calendar.timegm(time.gmtime()) ) # two years on either side of current time start = current_time - 63115200 end = current_time + 63115200 temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end)) 
platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-09-CTDMO0999', port_type=PortTypeEnum.UPLINK, ip_address=0) deployment_obj = IonObject(RT.Deployment, name='TestPlatformDeployment_' + name, description='some new deployment', context=IonObject(OT.CabledNodeDeploymentContext), constraint_list=[temporal_bounds], port_assignments={platform_device_id:platform_port_obj}) deploy_id = self.OMS.create_deployment(deployment=deployment_obj, site_id=site_id, device_id=platform_device_id) # assignments self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(platform_agent_instance_id, platform_device_id) self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, platform_agent_instance_id) self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id) return platform_agent_instance_id, platform_agent_id, platform_device_id def _make_instrument_agent_structure(agent_config=None): if None is agent_config: agent_config = {} # instance creation instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance, {"startup_config": inst_startup_config, 'alerts': generic_alerts_config}) instrument_agent_instance_obj.agent_config = agent_config instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj) # agent creation raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' ) instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations":[raw_config]}) instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj) # device creation instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice)) # data product creation dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom}) dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id) self.DP.activate_data_product_persistence(data_product_id=dp_id) self.addCleanup(self.DP.suspend_data_product_persistence, dp_id) #deployment creation site_obj = IonObject(RT.InstrumentSite, name='siteInstrument') site_id = self.OMS.create_instrument_site(instrument_site =site_obj) # find current deployment using time constraints current_time = int( calendar.timegm(time.gmtime()) ) # two years on either side of current time start = current_time - 63115200 end = current_time + 63115200 temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end)) platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-08-CTDMO0888', port_type=PortTypeEnum.PAYLOAD, ip_address=0) deployment_obj = IonObject(RT.Deployment, name='TestDeployment for Cabled Instrument', description='some new deployment', context=IonObject(OT.CabledInstrumentDeploymentContext), constraint_list=[temporal_bounds], port_assignments={instrument_device_id:platform_port_obj}) deploy_id = self.OMS.create_deployment(deployment=deployment_obj, site_id=site_id, device_id=instrument_device_id) # assignments self.RR2.assign_instrument_agent_instance_to_instrument_device_with_has_agent_instance(instrument_agent_instance_id, instrument_device_id) self.RR2.assign_instrument_agent_to_instrument_agent_instance_with_has_agent_definition(instrument_agent_id, instrument_agent_instance_id) 
self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id) return instrument_agent_instance_id, instrument_agent_id, instrument_device_id # can't do anything without an agent instance obj log.debug("Testing that preparing a launcher without agent instance raises an error") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated self.assertRaises(AssertionError, config_builder.p.prepare, will_launch=False) log.debug("Making the structure for a platform agent, which will be the child") platform_agent_instance_child_id, _, platform_device_child_id = _make_platform_agent_structure(name='child') platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id) log.debug("Preparing a valid agent instance launch, for config only") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated config_builder.p.set_agent_instance_object(platform_agent_instance_child_obj) child_config = config_builder.p.prepare(will_launch=False) verify_child_config(child_config, platform_device_child_id) log.debug("Making the structure for a platform agent, which will be the parent") platform_agent_instance_parent_id, _, platform_device_parent_id = _make_platform_agent_structure(name='parent') platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id) log.debug("Testing child-less parent as a child config") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj) parent_config = config_builder.p.prepare(will_launch=False) verify_child_config(parent_config, platform_device_parent_id) log.debug("assigning child platform to parent") assign_child_platform_to_parent_platform_fn(platform_device_child_id, platform_device_parent_id) child_device_ids = find_child_platform_ids_of_parent_platform_fn(platform_device_parent_id) self.assertNotEqual(0, len(child_device_ids)) log.debug("Testing parent + child as parent config") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj) parent_config = config_builder.p.prepare(will_launch=False) verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id) log.debug("making the structure for an instrument agent") instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure() instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id) log.debug("Testing instrument config") refresh_iconfig_builder_hack(config_builder) # associations have changed since builder was instantiated config_builder.i.set_agent_instance_object(instrument_agent_instance_obj) instrument_config = config_builder.i.prepare(will_launch=False) verify_instrument_config(instrument_config, instrument_device_id) log.debug("assigning instrument to platform") self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id, platform_device_child_id) child_device_ids = self.RR2.find_instrument_device_ids_of_platform_device_using_has_device(platform_device_child_id) self.assertNotEqual(0, len(child_device_ids)) log.debug("Testing entire config") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated 
config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj) full_config = config_builder.p.prepare(will_launch=False) verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id) #self.fail(parent_config) #plauncher.prepare(will_launch=False) log.info("END base_agent_instance_config")
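# For reference, the nested configuration shape that the verify_* helpers in
# base_agent_instance_config() assert, written out as a plain dict. The keys
# come straight from required_config_keys above; the values below are
# illustrative placeholders, not what the configuration builders actually emit.
EXPECTED_PARENT_CONFIG_SHAPE = {
    'org_governance_name': '<org governance name>',
    'device_type': 'PlatformDevice',
    'agent': {'resource_id': '<parent platform device id>'},
    'driver_config': {'foo': 'bar',                                 # from the agent instance's driver_config
                      'ports': {},                                  # must be a dict when non-empty
                      'process_type': ('ZMQPyClassDriverLauncher',)},
    'stream_config': {},                                            # asserted present; contents not checked here
    'startup_config': {},                                           # empty for platforms; populated for instruments
    'aparam_alerts_config': [{'lvl2': 'lvl3val'}],
    'children': {
        '<child platform device id>': {
            # same shape again, with the instrument config nested one level
            # deeper under this child's own 'children' entry
        },
    },
}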
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.process_dispatcher = ProcessDispatcherServiceClient() self.dataset_management = DatasetManagementServiceClient() self.unsc = UserNotificationServiceClient() self.data_retriever = DataRetrieverServiceClient() #------------------------------------------ # Create the environment #------------------------------------------ datastore_name = CACHE_DATASTORE_NAME self.db = self.container.datastore_manager.get_datastore(datastore_name) self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM') self.process_definitions = {} ingestion_worker_definition = ProcessDefinition(name='ingestion worker') ingestion_worker_definition.executable = { 'module':'ion.processes.data.ingestion.science_granule_ingestion_worker', 'class' :'ScienceGranuleIngestionWorker' } process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition) self.process_definitions['ingestion_worker'] = process_definition_id self.pids = [] self.exchange_points = [] self.exchange_names = [] #------------------------------------------------------------------------------------------------ # First launch the ingestors #------------------------------------------------------------------------------------------------ self.exchange_space = 'science_granule_ingestion' self.exchange_point = 'science_data' config = DotDict() config.process.datastore_name = 'datasets' config.process.queue_name = self.exchange_space self.exchange_names.append(self.exchange_space) self.exchange_points.append(self.exchange_point) pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config) log.debug("the ingestion worker process id: %s", pid) self.pids.append(pid) self.addCleanup(self.cleaning_up) def cleaning_up(self): for pid in self.pids: log.debug("number of pids to be terminated: %s", len(self.pids)) try: self.process_dispatcher.cancel_process(pid) log.debug("Terminated the process: %s", pid) except: log.debug("could not terminate the process id: %s" % pid) IngestionManagementIntTest.clean_subscriptions() for xn in self.exchange_names: xni = self.container.ex_manager.create_xn_queue(xn) xni.delete() for xp in self.exchange_points: xpi = self.container.ex_manager.create_xp(xp) xpi.delete() def get_datastore(self, dataset_id): dataset = self.dataset_management.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def test_create_data_product(self): #------------------------------------------------------------------------------------------------ # create a stream definition for the data from the ctd simulator #------------------------------------------------------------------------------------------------ parameter_dictionary_id = 
self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary_id) log.debug("Created stream def id %s" % ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test creating a new data product w/o a stream definition #------------------------------------------------------------------------------------------------ # Generic time-series data domain creation tdom, sdom = time_series_domain() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom.dump(), spatial_domain = sdom.dump()) log.debug("Created an IonObject for a data product: %s" % dp_obj) #------------------------------------------------------------------------------------------------ # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary #------------------------------------------------------------------------------------------------ dp_id = self.dpsc_cli.create_data_product( data_product= dp_obj, stream_definition_id=ctd_stream_def_id) self.dpsc_cli.activate_data_product_persistence(dp_id) dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertIsNotNone(dp_obj) #------------------------------------------------------------------------------------------------ # test creating a new data product with a stream definition #------------------------------------------------------------------------------------------------ log.debug('Creating new data product with a stream definition') dp_obj = IonObject(RT.DataProduct, name='DP2', description='some new dp', temporal_domain = tdom.dump(), spatial_domain = sdom.dump()) dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id) self.dpsc_cli.activate_data_product_persistence(dp_id2) log.debug('new dp_id = %s' % dp_id2) #------------------------------------------------------------------------------------------------ #make sure data product is associated with stream def #------------------------------------------------------------------------------------------------ streamdefs = [] streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True) for s in streams: log.debug("Checking stream %s" % s) sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True) for sd in sdefs: log.debug("Checking streamdef %s" % sd) streamdefs.append(sd) self.assertIn(ctd_stream_def_id, streamdefs) # test reading a non-existent data product log.debug('reading non-existent data product') with self.assertRaises(NotFound): dp_obj = self.dpsc_cli.read_data_product('some_fake_id') # update a data product (tests read also) log.debug('Updating data product') # first get the existing dp object dp_obj = self.dpsc_cli.read_data_product(dp_id) # now tweak the object dp_obj.description = 'the very first dp' # now write the dp back to the registry update_result = self.dpsc_cli.update_data_product(dp_obj) # now get the dp back to see if it was updated dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertEquals(dp_obj.description,'the very first dp') #test extension extended_product = self.dpsc_cli.get_data_product_extension(dp_id) self.assertEqual(dp_id, extended_product._id) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_product.computed.product_download_size_estimated.status) self.assertEqual(0, 
extended_product.computed.product_download_size_estimated.value) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_product.computed.parameters.status) #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value) # now 'delete' the data product log.debug("deleting data product: %s" % dp_id) self.dpsc_cli.delete_data_product(dp_id) self.dpsc_cli.force_delete_data_product(dp_id) # now try to get the deleted dp object with self.assertRaises(NotFound): dp_obj = self.dpsc_cli.read_data_product(dp_id) # Get the events corresponding to the data product ret = self.unsc.get_recent_events(resource_id=dp_id) events = ret.value for event in events: log.debug("event time: %s" % event.ts_created) # self.assertTrue(len(events) > 0) def test_data_product_stream_def(self): pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj, stream_definition_id=ctd_stream_def_id) stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id) self.assertEquals(ctd_stream_def_id, stream_def_id) def test_activate_suspend_data_product(self): #------------------------------------------------------------------------------------------------ # create a stream definition for the data from the ctd simulator #------------------------------------------------------------------------------------------------ pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) log.debug("Created stream def id %s" % ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test creating a new data product w/o a stream definition #------------------------------------------------------------------------------------------------ # Construct temporal and spatial Coordinate Reference System objects tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) log.debug("Created an IonObject for a data product: %s" % dp_obj) #------------------------------------------------------------------------------------------------ # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary #------------------------------------------------------------------------------------------------ dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj, stream_definition_id=ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test activate and suspend data product persistence #------------------------------------------------------------------------------------------------ self.dpsc_cli.activate_data_product_persistence(dp_id) dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertIsNotNone(dp_obj) dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True) if not 
dataset_ids: raise NotFound("Data Product %s dataset does not exist" % str(dp_id)) self.get_datastore(dataset_ids[0]) # Check that the streams associated with the data product are persisted stream_ids, _ = self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True) for stream_id in stream_ids: self.assertTrue(self.ingestclient.is_persisted(stream_id)) #-------------------------------------------------------------------------------- # Now get the data in one chunk using an RPC Call to start_retrieve #-------------------------------------------------------------------------------- replay_data = self.data_retriever.retrieve(dataset_ids[0]) self.assertIsInstance(replay_data, Granule) log.debug("The data retriever was able to replay the dataset that was attached to the data product " "we wanted to be persisted. Therefore the data product was indeed persisted; " "otherwise we could not have retrieved its dataset using the data retriever. " "This demonstrates that L4-CI-SA-RQ-267 is satisfied: 'Data product management shall persist data products'") data_product_object = self.rrclient.read(dp_id) self.assertEquals(data_product_object.name,'DP1') self.assertEquals(data_product_object.description,'some new dp') log.debug("Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. " " Attributes in create for the data product obj, name= '%s', description='%s', match those of object from the " "resource registry, name='%s', desc='%s'" % (dp_obj.name, dp_obj.description,data_product_object.name, data_product_object.description)) #------------------------------------------------------------------------------------------------ # test suspend data product persistence #------------------------------------------------------------------------------------------------ self.dpsc_cli.suspend_data_product_persistence(dp_id) self.dpsc_cli.force_delete_data_product(dp_id) # now try to get the deleted dp object with self.assertRaises(NotFound): dp_obj = self.rrclient.read(dp_id)
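# The tests above pair every activate_data_product_persistence() with an
# addCleanup(suspend_data_product_persistence, ...) immediately, and each
# create with its delete. Because unittest runs cleanups LIFO, registering
# teardown right after each setup step yields the reverse (safe) teardown
# order even when an assertion fails mid-test. A minimal runnable sketch:
import unittest

class CleanupOrderSketch(unittest.TestCase):
    def test_lifo_cleanups(self):
        order = []
        self.addCleanup(order.append, 'delete_data_product')    # registered first, runs last
        self.addCleanup(order.append, 'suspend_persistence')    # registered last, runs first
        self.doCleanups()                                       # runs and clears the cleanup stack
        self.assertEqual(['suspend_persistence', 'delete_data_product'], order)

if __name__ == '__main__':
    unittest.main()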
class BulkIngestBase(object): def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.pubsub_management = PubsubManagementServiceClient() self.dataset_management = DatasetManagementServiceClient() self.data_product_management = DataProductManagementServiceClient() self.data_acquisition_management = DataAcquisitionManagementServiceClient() self.data_retriever = DataRetrieverServiceClient() self.process_dispatch_client = ProcessDispatcherServiceClient(node=self.container.node) self.resource_registry = self.container.resource_registry self.context_ids = self.build_param_contexts() self.setup_resources() def build_param_contexts(self): raise NotImplementedError('build_param_contexts must be implemented in child classes') def create_external_dataset(self): raise NotImplementedError('create_external_dataset must be implemented in child classes') def get_dvr_config(self): raise NotImplementedError('get_dvr_config must be implemented in child classes') def get_retrieve_client(self, dataset_id=''): raise NotImplementedError('get_retrieve_client must be implemented in child classes') def test_data_ingest(self): self.pdict_id = self.create_parameter_dict(self.name) self.stream_def_id = self.create_stream_def(self.name, self.pdict_id) self.data_product_id = self.create_data_product(self.name, self.description, self.stream_def_id) self.dataset_id = self.get_dataset_id(self.data_product_id) self.stream_id, self.route = self.get_stream_id_and_route(self.data_product_id) self.external_dataset_id = self.create_external_dataset() self.data_producer_id = self.register_external_dataset(self.external_dataset_id) self.start_agent() def create_parameter_dict(self, name=''): return self.dataset_management.create_parameter_dictionary(name=name, parameter_context_ids=self.context_ids, temporal_context='time') def create_stream_def(self, name='', pdict_id=''): return self.pubsub_management.create_stream_definition(name=name, parameter_dictionary_id=pdict_id) def create_data_product(self, name='', description='', stream_def_id=''): tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dp_obj = DataProduct( name=name, description=description, processing_level_code='Parsed_Canonical', temporal_domain=tdom, spatial_domain=sdom) data_product_id = self.data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id) self.data_product_management.activate_data_product_persistence(data_product_id) return data_product_id def register_external_dataset(self, external_dataset_id=''): return self.data_acquisition_management.register_external_data_set(external_dataset_id=external_dataset_id) def get_dataset_id(self, data_product_id=''): dataset_ids, assocs = self.resource_registry.find_objects(subject=data_product_id, predicate='hasDataset', id_only=True) return dataset_ids[0] def get_stream_id_and_route(self, data_product_id): stream_ids, _ = self.resource_registry.find_objects(data_product_id, PRED.hasStream, RT.Stream, id_only=True) stream_id = stream_ids[0] route = self.pubsub_management.read_stream_route(stream_id) #self.create_logger(self.name, stream_id) return stream_id, route def start_agent(self): agent_config = { 'driver_config': self.get_dvr_config(), 'stream_config': {}, 'agent': {'resource_id': self.external_dataset_id}, 'test_mode': True } _ia_pid = self.container.spawn_process( name=self.EDA_NAME, module=self.EDA_MOD, cls=self.EDA_CLS, config=agent_config) self._ia_client = 
ResourceAgentClient(self.external_dataset_id, process=FakeProcess()) cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) self._ia_client.execute_agent(cmd) cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) self._ia_client.execute_agent(cmd) cmd = AgentCommand(command=ResourceAgentEvent.RUN) self._ia_client.execute_agent(cmd) cmd = AgentCommand(command=DriverEvent.START_AUTOSAMPLE) self._ia_client.execute_resource(command=cmd) self.start_listener(self.dataset_id) def stop_agent(self): cmd = AgentCommand(command=DriverEvent.STOP_AUTOSAMPLE) self._ia_client.execute_resource(cmd) cmd = AgentCommand(command=ResourceAgentEvent.RESET) self._ia_client.execute_agent(cmd) def start_listener(self, dataset_id=''): dataset_modified = Event() #callback to use retrieve to get data from the coverage def cb(*args, **kwargs): self.get_retrieve_client(dataset_id=dataset_id) #callback to keep execution going once dataset has been fully ingested def cb2(*args, **kwargs): dataset_modified.set() es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id) es.start() es2 = EventSubscriber(event_type=OT.DeviceCommonLifecycleEvent, callback=cb2, origin='BaseDataHandler._acquire_sample') es2.start() self.addCleanup(es.stop) self.addCleanup(es2.stop) #let it go for up to 120 seconds, then stop the agent and reset it dataset_modified.wait(120) self.stop_agent() def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name+'_logger') producer_definition.executable = { 'module':'ion.processes.data.stream_granule_logger', 'class':'StreamGranuleLogger' } logger_procdef_id = self.process_dispatch_client.create_process_definition(process_definition=producer_definition) configuration = { 'process':{ 'stream_id':stream_id, } } pid = self.process_dispatch_client.schedule_process(process_definition_id=logger_procdef_id, configuration=configuration) return pid
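# The start_listener() pattern above boils down to: subscribe a callback that
# flips an event flag, then block the test until the flag is set or a deadline
# passes. A self-contained sketch of that shape; threading.Event stands in for
# the gevent Event used by the test, and 'subscribe' is a hypothetical hook,
# not the EventSubscriber API.
import threading

def wait_for_ingest_complete(subscribe, timeout=120):
    done = threading.Event()
    subscribe(lambda *args, **kwargs: done.set())   # callback flips the flag
    if not done.wait(timeout):                      # block, but with a deadline
        raise RuntimeError('dataset was not fully ingested within %s seconds' % timeout)

# usage with a fake subscription that fires immediately:
wait_for_ingest_complete(lambda cb: cb(), timeout=1)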
class TestDataProductManagementServiceCoverage(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() log.debug("Start rel from url") self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.DPMS = DataProductManagementServiceClient() self.RR = ResourceRegistryServiceClient() self.RR2 = EnhancedResourceRegistryClient(self.RR) self.DAMS = DataAcquisitionManagementServiceClient() self.PSMS = PubsubManagementServiceClient() self.ingestclient = IngestionManagementServiceClient() self.PD = ProcessDispatcherServiceClient() self.DSMS = DatasetManagementServiceClient() self.unsc = UserNotificationServiceClient() self.data_retriever = DataRetrieverServiceClient() #------------------------------------------ # Create the environment #------------------------------------------ log.debug("get datastore") datastore_name = CACHE_DATASTORE_NAME self.db = self.container.datastore_manager.get_datastore( datastore_name) self.stream_def_id = self.PSMS.create_stream_definition( name='SBE37_CDM') self.process_definitions = {} ingestion_worker_definition = ProcessDefinition( name='ingestion worker') ingestion_worker_definition.executable = { 'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker', 'class': 'ScienceGranuleIngestionWorker' } process_definition_id = self.PD.create_process_definition( process_definition=ingestion_worker_definition) self.process_definitions['ingestion_worker'] = process_definition_id self.pids = [] self.exchange_points = [] self.exchange_names = [] self.addCleanup(self.cleaning_up) @staticmethod def clean_subscriptions(): ingestion_management = IngestionManagementServiceClient() pubsub = PubsubManagementServiceClient() rr = ResourceRegistryServiceClient() ingestion_config_ids = ingestion_management.list_ingestion_configurations( id_only=True) for ic in ingestion_config_ids: subscription_ids, assocs = rr.find_objects( subject=ic, predicate=PRED.hasSubscription, id_only=True) for subscription_id, assoc in zip(subscription_ids, assocs): rr.delete_association(assoc) try: pubsub.deactivate_subscription(subscription_id) except: log.exception("Unable to deactivate subscription: %s", subscription_id) pubsub.delete_subscription(subscription_id) def cleaning_up(self): for pid in self.pids: log.debug("number of pids to be terminated: %s", len(self.pids)) try: self.PD.cancel_process(pid) log.debug("Terminated the process: %s", pid) except: log.debug("could not terminate the process id: %s" % pid) TestDataProductManagementServiceCoverage.clean_subscriptions() for xn in self.exchange_names: xni = self.container.ex_manager.create_xn_queue(xn) xni.delete() for xp in self.exchange_points: xpi = self.container.ex_manager.create_xp(xp) xpi.delete() def test_CRUD_data_product(self): #------------------------------------------------------------------------------------------------ # create a stream definition for the data from the ctd simulator #------------------------------------------------------------------------------------------------ parameter_dictionary = self.DSMS.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict') ctd_stream_def_id = self.PSMS.create_stream_definition( name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary._id) log.debug("Created stream def id %s" % ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test creating a new data product w/o a stream definition 
#------------------------------------------------------------------------------------------------ # Generic time-series data domain creation tdom, sdom = time_series_domain() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain=tdom.dump(), spatial_domain=sdom.dump()) dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0 dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0 dp_obj.ooi_product_name = "PRODNAME" #------------------------------------------------------------------------------------------------ # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary #------------------------------------------------------------------------------------------------ log.debug("create dataset") dataset_id = self.RR2.create(any_old(RT.Dataset)) log.debug("dataset_id = %s", dataset_id) log.debug("create data product 1") dp_id = self.DPMS.create_data_product( data_product=dp_obj, stream_definition_id=ctd_stream_def_id, dataset_id=dataset_id) log.debug("dp_id = %s", dp_id) # Assert that the data product has an associated stream at this stage stream_ids, _ = self.RR.find_objects(dp_id, PRED.hasStream, RT.Stream, True) self.assertNotEquals(len(stream_ids), 0) log.debug("read data product") dp_obj = self.DPMS.read_data_product(dp_id) log.debug("find data products") self.assertIn(dp_id, [r._id for r in self.DPMS.find_data_products()]) log.debug("update data product") dp_obj.name = "brand new" self.DPMS.update_data_product(dp_obj) self.assertEqual("brand new", self.DPMS.read_data_product(dp_id).name) log.debug("activate/suspend persistence") self.assertFalse(self.DPMS.is_persisted(dp_id)) self.DPMS.activate_data_product_persistence(dp_id) self.addCleanup(self.DPMS.suspend_data_product_persistence, dp_id) # delete op will do this for us self.assertTrue(self.DPMS.is_persisted(dp_id)) log.debug("check error on checking persistence of nonexistent stream") #with self.assertRaises(NotFound): if True: self.DPMS.is_persisted(self.RR2.create(any_old(RT.DataProduct))) #log.debug("get extension") #self.DPMS.get_data_product_extension(dp_id) log.debug("getting last update") lastupdate = self.DPMS.get_last_update(dp_id) self.assertEqual( {}, lastupdate) # should be no updates to a new data product log.debug("prepare resource support") support = self.DPMS.prepare_data_product_support(dp_id) self.assertIsNotNone(support) log.debug("delete data product") self.DPMS.delete_data_product(dp_id) log.debug("try to suspend again") self.DPMS.suspend_data_product_persistence(dp_id) # try basic create log.debug("create without a dataset") dp_id2 = self.DPMS.create_data_product_(any_old(RT.DataProduct)) self.assertIsNotNone(dp_id2) log.debug("activate product %s", dp_id2) self.assertRaises(BadRequest, self.DPMS.activate_data_product_persistence, dp_id2) #self.assertNotEqual(0, len(self.RR2.find_dataset_ids_of_data_product_using_has_dataset(dp_id2))) log.debug("force delete data product") self.DPMS.force_delete_data_product(dp_id2) log.debug("test complete")
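# The geospatial bounds set in test_CRUD_data_product() are only well-formed
# when south <= north and (ignoring dateline wraps) west <= east. A tiny
# standalone checker in that spirit; the field names match the
# geospatial_bounds attributes used above, but this helper is illustrative,
# not part of the service.
def bounds_ok(b):
    return (b['geospatial_latitude_limit_south'] <= b['geospatial_latitude_limit_north'] and
            b['geospatial_longitude_limit_west'] <= b['geospatial_longitude_limit_east'])

assert bounds_ok({'geospatial_latitude_limit_north': 10.0,
                  'geospatial_latitude_limit_south': -10.0,
                  'geospatial_longitude_limit_east': 10.0,
                  'geospatial_longitude_limit_west': -10.0})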
class TestDataProductProvenance(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.dpmsclient = DataProductManagementServiceClient(node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient(node=self.container.node) self.process_dispatcher = ProcessDispatcherServiceClient() self.dataset_management = DatasetManagementServiceClient() # create missing data process definition dpd_obj = IonObject(RT.DataProcessDefinition, name=LOGICAL_TRANSFORM_DEFINITION_NAME, description="normally in preload", module='ion.processes.data.transforms.logical_transform', class_name='logical_transform') self.dataprocessclient.create_data_process_definition(dpd_obj) # deactivate all data processes when tests are complete def killAllDataProcesses(): for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]: self.dataprocessclient.deactivate_data_process(proc_id) self.dataprocessclient.delete_data_process(proc_id) self.addCleanup(killAllDataProcesses) #@unittest.skip('not ready') def test_get_provenance(self): #create a deployment with metadata and an initial site and device instrument_site_obj = IonObject(RT.InstrumentSite, name='InstrumentSite1', description='test instrument site') instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, "") log.debug( 'test_get_provenance: new instrument_site_id id = %s ', str(instrument_site_id)) # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" ) try: instModel_id = self.imsclient.create_instrument_model(instModel_obj) except BadRequest as ex: self.fail("failed to create new InstrumentModel: %s" %ex) log.debug( 'test_get_provenance: new InstrumentModel id = %s ', str(instModel_id)) self.omsclient.assign_instrument_model_to_instrument_site(instModel_id, instrument_site_id) # Create InstrumentAgent parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict', records_per_granule=2, granule_publish_rate=5 ) instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg", stream_configurations = [parsed_config] ) try: instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) except BadRequest as ex: self.fail("failed to create new InstrumentAgent: %s" %ex) log.debug( 'test_get_provenance:new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # Create InstrumentDevice log.debug('test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ') instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" ) try: instDevice_id = 
self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id) except BadRequest as ex: self.fail("failed to create new InstrumentDevice: %s" %ex) log.debug("test_get_provenance: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) #------------------------------- # Create CTD Parsed data product #------------------------------- tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id) log.debug( 'test_get_provenance:Creating new CDM data product with a stream definition') dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain = tdom, spatial_domain = sdom) ctd_parsed_data_product = self.dpmsclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug( 'new dp_id = %s', ctd_parsed_data_product) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product) self.dpmsclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product) #------------------------------- # create a data product for the site to pass the OMS check.... we need to remove this check #------------------------------- dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) log_data_product_id = self.dpmsclient.create_data_product(dp_obj, parsed_stream_def_id) self.omsclient.create_site_data_product(instrument_site_id, log_data_product_id) #------------------------------- # Deploy instrument device to instrument site #------------------------------- deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment') deployment_id = self.omsclient.create_deployment(deployment_obj) self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id) self.imsclient.deploy_instrument_device(instDevice_id, deployment_id) log.debug("test_get_provenance: created deployment id: %s ", str(deployment_id) ) self.omsclient.activate_deployment(deployment_id) inst_device_objs, _ = self.rrclient.find_objects(subject=instrument_site_id, predicate=PRED.hasDevice, object_type=RT.InstrumentDevice, id_only=False) log.debug("test_get_provenance: deployed device: %s ", str(inst_device_objs[0]) ) #------------------------------- # Create the agent instance #------------------------------- port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config = port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition ctd_L0_all") dpd_obj 
= IonObject(RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') try: ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new ctd_L0_all data process definition: %s" %ex) #------------------------------- # L1 Conductivity: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1ConductivityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_conductivity', description='create the L1 conductivity data product', module='ion.processes.data.transforms.ctd.ctd_L1_conductivity', class_name='CTDL1ConductivityTransform') try: ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" %ex) #------------------------------- # L1 Pressure: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1PressureTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_pressure', description='create the L1 pressure data product', module='ion.processes.data.transforms.ctd.ctd_L1_pressure', class_name='CTDL1PressureTransform') try: ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new CTDL1PressureTransform data process definition: %s" %ex) #------------------------------- # L1 Temperature: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1TemperatureTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_temperature', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L1_temperature', class_name='CTDL1TemperatureTransform') try: ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" %ex) #------------------------------- # L2 Salinity: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition SalinityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L2_salinity', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L2_salinity', class_name='SalinityTransform') try: ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new SalinityTransform data process definition: %s" %ex) #------------------------------- # L2 Density: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition DensityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L2_density', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L2_density', class_name='DensityTransform') try: ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new DensityTransform data process 
definition: %s" %ex) #------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' ) outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' ) outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' ) self.output_products={} log.debug("TestDataProductProvenance: create output data product L0 conductivity") ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L0_Conductivity', description='transform output conductivity', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id) self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id log.debug("TestDataProductProvenance: create output data product L0 pressure") ctd_l0_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L0_Pressure', description='transform output pressure', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id) self.output_products['pressure'] = ctd_l0_pressure_output_dp_id log.debug("TestDataProductProvenance: create output data product L0 temperature") ctd_l0_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L0_Temperature', description='transform output temperature', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id) self.output_products['temperature'] = ctd_l0_temperature_output_dp_id #------------------------------- # L1 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(name='L1_conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id, binding='conductivity' ) outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(name='L1_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id, binding='pressure' ) outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(name='L1_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id, binding='temperature' ) log.debug("TestDataProductProvenance: create output data product L1 conductivity") 
ctd_l1_conductivity_output_dp_obj = IonObject(RT.DataProduct, name='L1_Conductivity', description='transform output L1 conductivity', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_conductivity_output_dp_obj, outgoing_stream_l1_conductivity_id) log.debug("TestDataProductProvenance: create output data product L1 pressure") ctd_l1_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L1_Pressure', description='transform output L1 pressure', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_pressure_output_dp_obj, outgoing_stream_l1_pressure_id) log.debug("TestDataProductProvenance: create output data product L1 temperature") ctd_l1_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L1_Temperature', description='transform output L1 temperature', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_temperature_output_dp_obj, outgoing_stream_l1_temperature_id) #------------------------------- # L2 Salinity - Density: Output Data Products #------------------------------- outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(name='L2_salinity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id, binding='salinity' ) outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(name='L2_Density', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id, binding='density' ) log.debug("TestDataProductProvenance: create output data product L2 Salinity") ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct, name='L2_Salinity', description='transform output L2 salinity', temporal_domain = tdom, spatial_domain = sdom) ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id) log.debug("TestDataProductProvenance: create output data product L2 Density") contactInfo = ContactInformation() contactInfo.individual_names_given = "Bill" contactInfo.individual_name_family = "Smith" contactInfo.street_address = "111 First St" contactInfo.city = "San Diego" contactInfo.email = "*****@*****.**" contactInfo.phones = ["858-555-6666"] contactInfo.country = "USA" contactInfo.postal_code = "92123" ctd_l2_density_output_dp_obj = IonObject( RT.DataProduct, name='L2_Density', description='transform output L2 density', contacts = [contactInfo], iso_topic_category = "my_iso_topic_category_here", quality_control_level = "1", temporal_domain = tdom, spatial_domain = sdom) ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L0 all data_process start") try: ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products) #activate only this data process just for coverage self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) contents = "this is the lookup table contents, replace with a file..." att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII) deviceAttachment = self.rrclient.create_attachment(ctd_l0_all_data_process_id, att) log.info( 'TestDataProductProvenance: data process attachment id = %s', deviceAttachment) log.debug("TestDataProductProvenance: create L0 all data_process return") #------------------------------- # L1 Conductivity: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1 Conductivity data_process start") try: l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_conductivity_dprocdef_id, [ctd_l0_conductivity_output_dp_id], {'conductivity':ctd_l1_conductivity_output_dp_id}) self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L1 Pressure: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1_Pressure data_process start") try: l1_pressure_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_pressure_dprocdef_id, [ctd_l0_pressure_output_dp_id], {'pressure':ctd_l1_pressure_output_dp_id}) self.dataprocessclient.activate_data_process(l1_pressure_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L1 Temperature: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1_Temperature data_process start") try: l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_temperature_dprocdef_id, [ctd_l0_temperature_output_dp_id], {'temperature':ctd_l1_temperature_output_dp_id}) self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L2 Salinity: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L2_salinity data_process start") try: l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_salinity_dprocdef_id, [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id], {'salinity':ctd_l2_salinity_output_dp_id}) self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L2 Density: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L2_Density data_process start") try: l2_density_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_density_dprocdef_id, [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id], {'density':ctd_l2_density_output_dp_id}) self.dataprocessclient.activate_data_process(l2_density_all_data_process_id) except 
BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # Launch InstrumentAgentInstance, connect to the resource agent client #------------------------------- self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id) print 'TestDataProductProvenance: Instrument agent instance obj: = ', inst_agent_instance_obj # Start a resource agent client to talk with the instrument agent. # self._ia_client = ResourceAgentClient('iaclient', name=inst_agent_instance_obj.agent_process_id, process=FakeProcess()) # print 'activate_instrument: got ia client %s', self._ia_client # log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client)) #------------------------------- # Deactivate InstrumentAgentInstance #------------------------------- self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) self.dataprocessclient.deactivate_data_process(l2_density_all_data_process_id) self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id) self.dataprocessclient.deactivate_data_process(l1_temperature_all_data_process_id) self.dataprocessclient.deactivate_data_process(l1_pressure_data_process_id) self.dataprocessclient.deactivate_data_process(l1_conductivity_data_process_id) self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id) #------------------------------- # Retrieve the provenance info for the ctd density data product #------------------------------- provenance_dict = self.dpmsclient.get_data_product_provenance(ctd_l2_density_output_dp_id) log.debug("TestDataProductProvenance: provenance_dict %s", str(provenance_dict)) #validate that products are represented self.assertTrue (provenance_dict[str(ctd_l1_conductivity_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l0_conductivity_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l2_density_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l1_temperature_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l0_temperature_output_dp_id)]) density_dict = (provenance_dict[str(ctd_l2_density_output_dp_id)]) self.assertEquals(density_dict['producer'], [l2_density_all_data_process_id]) #------------------------------- # Retrieve the extended resource for this data product #------------------------------- extended_product = self.dpmsclient.get_data_product_extension(ctd_l2_density_output_dp_id) self.assertEqual(1, len(extended_product.data_processes) ) self.assertEqual(3, len(extended_product.process_input_data_products) ) # log.debug("TestDataProductProvenance: DataProduct provenance_product_list %s", str(extended_product.provenance_product_list)) # log.debug("TestDataProductProvenance: DataProduct data_processes %s", str(extended_product.data_processes)) # log.debug("TestDataProductProvenance: DataProduct process_input_data_products %s", str(extended_product.process_input_data_products)) # log.debug("TestDataProductProvenance: provenance %s", str(extended_product.computed.provenance.value)) #------------------------------- # Retrieve the extended resource for this data process #------------------------------- extended_process_def = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id) # log.debug("TestDataProductProvenance: DataProcess extended_process_def %s", str(extended_process_def)) # 
log.debug("TestDataProductProvenance: DataProcess data_processes %s", str(extended_process_def.data_processes)) # log.debug("TestDataProductProvenance: DataProcess data_products %s", str(extended_process_def.data_products)) self.assertEqual(1, len(extended_process_def.data_processes) ) self.assertEqual(3, len(extended_process_def.output_stream_definitions) ) self.assertEqual(3, len(extended_process_def.data_products) ) #one list because of one data process #------------------------------- # Request the xml report #------------------------------- results = self.dpmsclient.get_data_product_provenance_report(ctd_l2_density_output_dp_id) #------------------------------- # Cleanup #------------------------------- self.dpmsclient.delete_data_product(ctd_parsed_data_product) self.dpmsclient.delete_data_product(log_data_product_id) self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id) self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id) self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)
class ExhaustiveParameterTest(IonIntegrationTestCase): def setUp(self): self.i = 0 self._start_container() self.container.start_rel_from_url('res/deploy/r2params.yml') self.dataset_management = DatasetManagementServiceClient() self.pubsub_management = PubsubManagementServiceClient() self.data_product_management = DataProductManagementServiceClient() self.resource_registry = self.container.resource_registry self.data_retriever = DataRetrieverServiceClient() pdicts, _ = self.resource_registry.find_resources( restype='ParameterDictionary', id_only=False) self.dp_ids = [] for pdict in pdicts: stream_def_id = self.pubsub_management.create_stream_definition( pdict.name, parameter_dictionary_id=pdict._id) dp_id = self.make_dp(stream_def_id) if dp_id: self.dp_ids.append(dp_id) def make_dp(self, stream_def_id): tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() stream_def = self.resource_registry.read(stream_def_id) dp_obj = DataProduct(name=stream_def.name, description=stream_def.name, processing_level_code='Parsed_Canonical', temporal_domain=tdom, spatial_domain=sdom) data_product_id = self.data_product_management.create_data_product( dp_obj, stream_definition_id=stream_def_id) self.data_product_management.activate_data_product_persistence( data_product_id) return data_product_id def fill_values(self, ptype, size): if isinstance(ptype, ArrayType): return ['blah'] * size elif isinstance(ptype, QuantityType): return np.sin( np.arange(size, dtype=ptype.value_encoding) * 2 * np.pi / 3) elif isinstance(ptype, RecordType): return [{'record': 'ok'}] * size elif isinstance(ptype, ConstantRangeType): return (1, 1000) elif isinstance(ptype, ConstantType): return np.dtype(ptype.value_encoding).type(1) elif isinstance(ptype, CategoryType): return ptype.categories.keys()[0] else: return def wait_until_we_have_enough_granules(self, dataset_id='', data_size=40): ''' Loops until there is a sufficient amount of data in the dataset ''' done = False with gevent.Timeout(40): while not done: granule = self.data_retriever.retrieve_last_data_points( dataset_id, 1) rdt = RecordDictionaryTool.load_from_granule(granule) extents = self.dataset_management.dataset_extents( dataset_id, rdt._pdict.temporal_parameter_name)[0] if rdt[rdt._pdict.temporal_parameter_name] and rdt[ rdt._pdict. 
temporal_parameter_name][0] != rdt._pdict.get_context( rdt._pdict.temporal_parameter_name ).fill_value and extents >= data_size: done = True else: gevent.sleep(0.2) def write_to_data_product(self, data_product_id): dataset_ids, _ = self.resource_registry.find_objects(data_product_id, 'hasDataset', id_only=True) dataset_id = dataset_ids.pop() stream_ids, _ = self.resource_registry.find_objects(data_product_id, 'hasStream', id_only=True) stream_id = stream_ids.pop() stream_def_ids, _ = self.resource_registry.find_objects( stream_id, 'hasStreamDefinition', id_only=True) stream_def_id = stream_def_ids.pop() route = self.pubsub_management.read_stream_route(stream_id) rdt = RecordDictionaryTool(stream_definition_id=stream_def_id) time_param = rdt._pdict.temporal_parameter_name if time_param is None: print '%s has no temporal parameter' % self.resource_registry.read( data_product_id).name return rdt[time_param] = np.arange(40) for field in rdt.fields: if field == rdt._pdict.temporal_parameter_name: continue rdt[field] = self.fill_values( rdt._pdict.get_context(field).param_type, 40) publisher = StandaloneStreamPublisher(stream_id, route) publisher.publish(rdt.to_granule()) self.wait_until_we_have_enough_granules(dataset_id, 40) granule = self.data_retriever.retrieve(dataset_id) rdt_out = RecordDictionaryTool.load_from_granule(granule) bad = [] for field in rdt.fields: if not np.array_equal(rdt[field], rdt_out[field]): print '%s' % field print '%s != %s' % (rdt[field], rdt_out[field]) bad.append(field) return bad def test_data_products(self): bad_data_products = {} for dp_id in self.dp_ids: try: bad_fields = self.write_to_data_product(dp_id) if bad_fields: bad_data_products[ dp_id] = "Couldn't write and retrieve %s." % bad_fields except: import traceback bad_data_products[dp_id] = traceback.format_exc() for dp_id, tb in bad_data_products.iteritems(): print '----------' print 'Problem with %s' % self.resource_registry.read(dp_id).name print tb print '----------' if bad_data_products: raise AssertionError('There are bad parameter dictionaries.')
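# A self-contained sketch (not from the original suite) of the field comparison
# used in write_to_data_product() above: np.array_equal requires identical shape
# and element-wise equality, so a single fill value sneaking into the retrieved
# granule is enough to mark a field as bad.
def fields_differ(written, retrieved):
    """Return True when a retrieved array no longer matches what was published."""
    return not np.array_equal(np.asarray(written), np.asarray(retrieved))

# Example: one sample replaced by a fill value (-9999 here) is detected.
assert fields_differ(np.arange(5), np.array([0, 1, -9999, 3, 4]))
assert not fields_differ(np.arange(5), np.arange(5))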
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) self.OMS = ObservatoryManagementServiceClient(node=self.container.node) self.org_management_service = OrgManagementServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() #print 'TestObservatoryManagementServiceIntegration: started services' self.event_publisher = EventPublisher() # @unittest.skip('this exists only for debugging the launch process') # def test_just_the_setup(self): # return def destroy(self, resource_ids): self.OMS.force_delete_observatory(resource_ids.observatory_id) self.OMS.force_delete_subsite(resource_ids.subsite_id) self.OMS.force_delete_subsite(resource_ids.subsite2_id) self.OMS.force_delete_subsite(resource_ids.subsiteb_id) self.OMS.force_delete_subsite(resource_ids.subsitez_id) self.OMS.force_delete_platform_site(resource_ids.platform_site_id) self.OMS.force_delete_platform_site(resource_ids.platform_siteb_id) self.OMS.force_delete_platform_site(resource_ids.platform_siteb2_id) self.OMS.force_delete_platform_site(resource_ids.platform_site3_id) self.OMS.force_delete_instrument_site(resource_ids.instrument_site_id) self.OMS.force_delete_instrument_site(resource_ids.instrument_site2_id) self.OMS.force_delete_instrument_site(resource_ids.instrument_siteb3_id) self.OMS.force_delete_instrument_site(resource_ids.instrument_site4_id) #@unittest.skip('targeting') def test_observatory_management(self): resources = self._make_associations() self._do_test_find_related_sites(resources) self._do_test_get_sites_devices_status(resources) self._do_test_find_site_data_products(resources) self._do_test_find_related_frames_of_reference(resources) self._do_test_create_geospatial_point_center(resources) self._do_test_find_observatory_org(resources) self.destroy(resources) def _do_test_find_related_sites(self, resources): site_resources, site_children = self.OMS.find_related_sites(resources.org_id) #import sys, pprint #print >> sys.stderr, pprint.pformat(site_resources) #print >> sys.stderr, pprint.pformat(site_children) #self.assertIn(resources.org_id, site_resources) self.assertIn(resources.observatory_id, site_resources) self.assertIn(resources.subsite_id, site_resources) self.assertIn(resources.subsite_id, site_resources) self.assertIn(resources.subsite2_id, site_resources) self.assertIn(resources.platform_site_id, site_resources) self.assertIn(resources.instrument_site_id, site_resources) self.assertEquals(len(site_resources), 13) self.assertEquals(site_resources[resources.observatory_id].type_, RT.Observatory) self.assertIn(resources.org_id, site_children) self.assertIn(resources.observatory_id, site_children) self.assertIn(resources.subsite_id, site_children) self.assertIn(resources.subsite_id, site_children) self.assertIn(resources.subsite2_id, site_children) self.assertIn(resources.platform_site_id, 
site_children) self.assertNotIn(resources.instrument_site_id, site_children) self.assertEquals(len(site_children), 9) self.assertIsInstance(site_children[resources.subsite_id], list) self.assertEquals(len(site_children[resources.subsite_id]), 2) def _do_test_get_sites_devices_status(self, resources): result_dict = self.OMS.get_sites_devices_status(resources.org_id) site_resources = result_dict.get("site_resources", None) site_children = result_dict.get("site_children", None) self.assertEquals(len(site_resources), 14) self.assertEquals(len(site_children), 9) result_dict = self.OMS.get_sites_devices_status(resources.org_id, include_devices=True, include_status=True) log.debug("RESULT DICT: %s", result_dict.keys()) site_resources = result_dict.get("site_resources", None) site_children = result_dict.get("site_children", None) site_status = result_dict.get("site_status", None) self.assertEquals(len(site_resources), 14) self.assertEquals(len(site_children), 9) result_dict = self.OMS.get_sites_devices_status(resources.observatory_id, include_devices=True, include_status=True) site_resources = result_dict.get("site_resources") site_children = result_dict.get("site_children") site_status = result_dict.get("site_status") self.assertEquals(len(site_resources), 13) self.assertEquals(len(site_children), 8) def _do_test_find_site_data_products(self, resources): res_dict = self.OMS.find_site_data_products(resources.org_id) #import sys, pprint #print >> sys.stderr, pprint.pformat(res_dict) self.assertIsNone(res_dict['data_product_resources']) self.assertIn(resources.platform_device_id, res_dict['device_data_products']) self.assertIn(resources.instrument_device_id, res_dict['device_data_products']) #@unittest.skip('targeting') def _do_test_find_related_frames_of_reference(self, stuff): # finding subordinates gives a dict of obj lists, convert objs to ids def idify(adict): ids = {} for k, v in adict.iteritems(): ids[k] = [] for obj in v: ids[k].append(obj._id) return ids # a short version of the function we're testing, with id-ify def short(resource_id, output_types): ret = self.OMS.find_related_frames_of_reference(resource_id, output_types) return idify(ret) #set up associations first stuff = self._make_associations() #basic traversal of tree from instrument to platform ids = short(stuff.instrument_site_id, [RT.PlatformSite]) self.assertIn(RT.PlatformSite, ids) self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite]) self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite]) self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite]) #since this is the first search, just make sure the input inst_id got stripped if RT.InstrumentSite in ids: self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite]) #basic traversal of tree from platform to instrument ids = short(stuff.platform_siteb_id, [RT.InstrumentSite]) self.assertIn(RT.InstrumentSite, ids) self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite]) self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite]) #full traversal of tree from observatory down to instrument ids = short(stuff.observatory_id, [RT.InstrumentSite]) self.assertIn(RT.InstrumentSite, ids) self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite]) #full traversal of tree from instrument to observatory ids = short(stuff.instrument_site_id, [RT.Observatory]) self.assertIn(RT.Observatory, ids) self.assertIn(stuff.observatory_id, ids[RT.Observatory]) #partial traversal, only down to platform ids = short(stuff.observatory_id, [RT.Subsite, 
RT.PlatformSite]) self.assertIn(RT.PlatformSite, ids) self.assertIn(RT.Subsite, ids) self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite]) self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite]) self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite]) self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite]) self.assertIn(stuff.subsite_id, ids[RT.Subsite]) self.assertIn(stuff.subsite2_id, ids[RT.Subsite]) self.assertIn(stuff.subsitez_id, ids[RT.Subsite]) self.assertIn(stuff.subsiteb_id, ids[RT.Subsite]) self.assertNotIn(RT.InstrumentSite, ids) #partial traversal, from instrument up through platform to subsite ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite]) self.assertIn(RT.PlatformSite, ids) self.assertIn(RT.Subsite, ids) self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite]) self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite]) self.assertIn(stuff.subsite_id, ids[RT.Subsite]) self.assertIn(stuff.subsiteb_id, ids[RT.Subsite]) self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite]) self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite]) self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite]) self.assertNotIn(RT.Observatory, ids) self.destroy(stuff) def _make_associations(self): """ create one of each resource and association used by OMS to guard against problems in ion-definitions """ #raise unittest.SkipTest("https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41") """
the tree we're creating (observatory, sites, platforms, instruments)
rows are lettered, columns numbered.
- first row is implied a
- first column is implied 1
- site Z, just because

O--Sz
|
S--S2--P3--I4
|
Sb-Pb2-Ib3
|
P--I2   <- PlatformDevice, InstrumentDevice2
|
Pb      <- PlatformDevice b
|
I       <- InstrumentDevice
""" org_id = self.OMS.create_marine_facility(any_old(RT.Org)) def create_under_org(resource_type, extra_fields=None): obj = any_old(resource_type, extra_fields) if RT.InstrumentDevice == resource_type: resource_id = self.IMS.create_instrument_device(obj) else: resource_id, _ = self.RR.create(obj) self.OMS.assign_resource_to_observatory_org(resource_id=resource_id, org_id=org_id) return resource_id #stuff we control observatory_id = create_under_org(RT.Observatory) subsite_id = create_under_org(RT.Subsite) subsite2_id = create_under_org(RT.Subsite) subsiteb_id = create_under_org(RT.Subsite) subsitez_id = create_under_org(RT.Subsite) platform_site_id = create_under_org(RT.PlatformSite) platform_siteb_id = create_under_org(RT.PlatformSite) platform_siteb2_id = create_under_org(RT.PlatformSite) platform_site3_id = create_under_org(RT.PlatformSite) instrument_site_id = create_under_org(RT.InstrumentSite) instrument_site2_id = create_under_org(RT.InstrumentSite) instrument_siteb3_id = create_under_org(RT.InstrumentSite) instrument_site4_id = create_under_org(RT.InstrumentSite) #stuff we associate to instrument_device_id = create_under_org(RT.InstrumentDevice) instrument_device2_id = create_under_org(RT.InstrumentDevice) platform_device_id = create_under_org(RT.PlatformDevice) platform_deviceb_id = create_under_org(RT.PlatformDevice) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) deployment_id, _ = self.RR.create(any_old(RT.Deployment)) #observatory self.RR.create_association(observatory_id, PRED.hasSite, subsite_id) self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id) #site self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id) self.RR.create_association(subsite_id, 
PRED.hasSite, subsiteb_id) self.RR.create_association(subsite2_id, PRED.hasSite, platform_site3_id) self.RR.create_association(subsiteb_id, PRED.hasSite, platform_siteb2_id) self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id) #platform_site(s) self.RR.create_association(platform_site3_id, PRED.hasSite, instrument_site4_id) self.RR.create_association(platform_siteb2_id, PRED.hasSite, instrument_siteb3_id) self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site2_id) self.RR.create_association(platform_site_id, PRED.hasSite, platform_siteb_id) self.RR.create_association(platform_siteb_id, PRED.hasSite, instrument_site_id) self.RR.create_association(platform_siteb_id, PRED.hasDevice, platform_deviceb_id) #test network parent link self.OMS.assign_device_to_network_parent(platform_device_id, platform_deviceb_id) self.RR.create_association(platform_site_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id) self.RR.create_association(platform_site_id, PRED.hasDeployment, deployment_id) #instrument_site(s) self.RR.create_association(instrument_site_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(instrument_site_id, PRED.hasDeployment, deployment_id) self.RR.create_association(instrument_site2_id, PRED.hasDevice, instrument_device2_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id) ret = DotDict() ret.org_id = org_id ret.observatory_id = observatory_id ret.subsite_id = subsite_id ret.subsite2_id = subsite2_id ret.subsiteb_id = subsiteb_id ret.subsitez_id = subsitez_id ret.platform_site_id = platform_site_id ret.platform_siteb_id = platform_siteb_id ret.platform_siteb2_id = platform_siteb2_id ret.platform_site3_id = platform_site3_id ret.instrument_site_id = instrument_site_id ret.instrument_site2_id = instrument_site2_id ret.instrument_siteb3_id = instrument_siteb3_id ret.instrument_site4_id = instrument_site4_id ret.instrument_device_id = instrument_device_id ret.instrument_device2_id = instrument_device2_id ret.platform_device_id = platform_device_id ret.platform_deviceb_id = platform_deviceb_id ret.instrument_model_id = instrument_model_id ret.platform_model_id = platform_model_id ret.deployment_id = deployment_id return ret #@unittest.skip("targeting") def test_create_observatory(self): observatory_obj = IonObject(RT.Observatory, name='TestFacility', description='some new mf') observatory_id = self.OMS.create_observatory(observatory_obj) self.OMS.force_delete_observatory(observatory_id) #@unittest.skip("targeting") def _do_test_create_geospatial_point_center(self, resources): platformsite_obj = IonObject(RT.PlatformSite, name='TestPlatformSite', description='some new TestPlatformSite') geo_index_obj = IonObject(OT.GeospatialBounds) geo_index_obj.geospatial_latitude_limit_north = 20.0 geo_index_obj.geospatial_latitude_limit_south = 10.0 geo_index_obj.geospatial_longitude_limit_east = 15.0 geo_index_obj.geospatial_longitude_limit_west = 20.0 platformsite_obj.constraint_list = [geo_index_obj] platformsite_id = self.OMS.create_platform_site(platformsite_obj) # now get the dp back to see if it was updated platformsite_obj = 
self.OMS.read_platform_site(platformsite_id) self.assertEquals('some new TestPlatformSite', platformsite_obj.description) self.assertAlmostEqual(15.0, platformsite_obj.geospatial_point_center.lat, places=1) #now adjust a few params platformsite_obj.description = 'some old TestPlatformSite' geo_index_obj = IonObject(OT.GeospatialBounds) geo_index_obj.geospatial_latitude_limit_north = 30.0 geo_index_obj.geospatial_latitude_limit_south = 20.0 platformsite_obj.constraint_list = [geo_index_obj] update_result = self.OMS.update_platform_site(platformsite_obj) # now get the dp back to see if it was updated platformsite_obj = self.OMS.read_platform_site(platformsite_id) self.assertEquals('some old TestPlatformSite', platformsite_obj.description) self.assertAlmostEqual(25.0, platformsite_obj.geospatial_point_center.lat, places=1) self.OMS.force_delete_platform_site(platformsite_id) #@unittest.skip("targeting") def _do_test_find_observatory_org(self, resources): log.debug("Make TestOrg") org_obj = IonObject(RT.Org, name='TestOrg', description='some new mf org') org_id = self.OMS.create_marine_facility(org_obj) log.debug("Make Observatory") observatory_obj = IonObject(RT.Observatory, name='TestObservatory', description='some new obs') observatory_id = self.OMS.create_observatory(observatory_obj) log.debug("assign observatory to org") self.OMS.assign_resource_to_observatory_org(observatory_id, org_id) log.debug("verify assignment") org_objs = self.OMS.find_org_by_observatory(observatory_id) self.assertEqual(1, len(org_objs)) self.assertEqual(org_id, org_objs[0]._id) log.debug("org_id=<" + org_id + ">") log.debug("create a subsite with parent Observatory") subsite_obj = IonObject(RT.Subsite, name= 'TestSubsite', description = 'sample subsite') subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id) self.assertIsNotNone(subsite_id, "Subsite not created.") log.debug("verify that Subsite is linked to Observatory") mf_subsite_assoc = self.RR.get_association(observatory_id, PRED.hasSite, subsite_id) self.assertIsNotNone(mf_subsite_assoc, "Subsite not connected to Observatory.") log.debug("add the Subsite as a resource of this Observatory") self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id, org_id=org_id) log.debug("verify that Subsite is linked to Org") org_subsite_assoc = self.RR.get_association(org_id, PRED.hasResource, subsite_id) self.assertIsNotNone(org_subsite_assoc, "Subsite not connected as resource to Org.") log.debug("create a logical platform with parent Subsite") platform_site_obj = IonObject(RT.PlatformSite, name= 'TestPlatformSite', description = 'sample logical platform') platform_site_id = self.OMS.create_platform_site(platform_site_obj, subsite_id) self.assertIsNotNone(platform_site_id, "PlatformSite not created.") log.debug("verify that PlatformSite is linked to Site") site_lp_assoc = self.RR.get_association(subsite_id, PRED.hasSite, platform_site_id) self.assertIsNotNone(site_lp_assoc, "PlatformSite not connected to Site.") log.debug("add the PlatformSite as a resource of this Observatory") self.OMS.assign_resource_to_observatory_org(resource_id=platform_site_id, org_id=org_id) log.debug("verify that PlatformSite is linked to Org") org_lp_assoc = self.RR.get_association(org_id, PRED.hasResource, platform_site_id) self.assertIsNotNone(org_lp_assoc, "PlatformSite not connected as resource to Org.") log.debug("create a logical instrument with parent logical platform") instrument_site_obj = IonObject(RT.InstrumentSite, name= 'TestInstrumentSite', description = 'sample logical instrument') instrument_site_id = self.OMS.create_instrument_site(instrument_site_obj, platform_site_id) self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.") log.debug("verify that InstrumentSite is linked to PlatformSite") li_lp_assoc = self.RR.get_association(platform_site_id, PRED.hasSite, instrument_site_id) self.assertIsNotNone(li_lp_assoc, "InstrumentSite not connected to PlatformSite.") log.debug("add the InstrumentSite as a resource of this Observatory") self.OMS.assign_resource_to_observatory_org(resource_id=instrument_site_id, org_id=org_id) log.debug("verify that InstrumentSite is linked to Org") org_li_assoc = self.RR.get_association(org_id, PRED.hasResource, instrument_site_id) self.assertIsNotNone(org_li_assoc, "InstrumentSite not connected as resource to Org.") log.debug("remove the InstrumentSite as a resource of this Observatory") self.OMS.unassign_resource_from_observatory_org(instrument_site_id, org_id) log.debug("verify that InstrumentSite is no longer linked to Org") assocs,_ = self.RR.find_objects(org_id, PRED.hasResource, RT.InstrumentSite, id_only=True ) self.assertEqual(0, len(assocs)) log.debug("remove the InstrumentSite, association should drop automatically") self.OMS.delete_instrument_site(instrument_site_id) assocs, _ = self.RR.find_objects(platform_site_id, PRED.hasSite, RT.InstrumentSite, id_only=True ) self.assertEqual(0, len(assocs)) log.debug("remove the PlatformSite as a resource of this Observatory") self.OMS.unassign_resource_from_observatory_org(platform_site_id, org_id) log.debug("verify that PlatformSite is no longer linked to Org") assocs,_ = self.RR.find_objects(org_id, PRED.hasResource, RT.PlatformSite, id_only=True ) self.assertEqual(0, len(assocs)) log.debug("remove the Site as a resource of this Observatory") self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id) log.debug("verify that Site is no longer linked to Org") assocs,_ = self.RR.find_objects(org_id, PRED.hasResource, RT.Subsite, id_only=True ) self.assertEqual(0, len(assocs)) self.RR.delete(org_id) self.OMS.force_delete_observatory(observatory_id) self.OMS.force_delete_subsite(subsite_id) self.OMS.force_delete_platform_site(platform_site_id) self.OMS.force_delete_instrument_site(instrument_site_id) @attr('EXT') def test_observatory_extensions(self): obs_id = self.RR2.create(any_old(RT.Observatory)) pss_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="StationSite"))) pas_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="PlatformAssemblySite"))) pcs_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="PlatformComponentSite"))) ins_id = self.RR2.create(any_old(RT.InstrumentSite)) obs_obj = self.RR2.read(obs_id) pss_obj = self.RR2.read(pss_id) pas_obj = self.RR2.read(pas_id) pcs_obj = self.RR2.read(pcs_id) ins_obj = self.RR2.read(ins_id) self.RR2.create_association(obs_id, PRED.hasSite, pss_id) self.RR2.create_association(pss_id, PRED.hasSite, pas_id) self.RR2.create_association(pas_id, PRED.hasSite, pcs_id) self.RR2.create_association(pcs_id, PRED.hasSite, ins_id) extended_obs = self.OMS.get_observatory_site_extension(obs_id, user_id=12345) self.assertEqual([pss_obj], extended_obs.computed.platform_station_sites.value) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_station_sites.status) self.assertEqual([pas_obj], extended_obs.computed.platform_assembly_sites.value) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_assembly_sites.status) 
self.assertEqual([pcs_obj], extended_obs.computed.platform_component_sites.value) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_component_sites.status) self.assertEqual([ins_obj], extended_obs.computed.instrument_sites.value) extended_pss = self.OMS.get_observatory_site_extension(pss_id, user_id=12345) self.assertEqual([pas_obj], extended_pss.computed.platform_assembly_sites.value) self.assertEqual([pcs_obj], extended_pss.computed.platform_component_sites.value) self.assertEqual([ins_obj], extended_pss.computed.instrument_sites.value) extended_pas = self.OMS.get_observatory_site_extension(pas_id, user_id=12345) self.assertEqual([pcs_obj], extended_pas.computed.platform_component_sites.value) self.assertEqual([ins_obj], extended_pas.computed.instrument_sites.value) extended_pcs = self.OMS.get_platform_component_site_extension(pcs_id, user_id=12345) self.assertEqual([ins_obj], extended_pcs.computed.instrument_sites.value) #@unittest.skip("in development...") @attr('EXT') @attr('EXT1') def test_observatory_org_extended(self): stuff = self._make_associations() parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubcli.create_stream_definition(name='parsed', parameter_dictionary_id=parsed_pdict_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain = tdom, spatial_domain = sdom) data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) self.damsclient.assign_data_product(input_resource_id=stuff.instrument_device_id, data_product_id=data_product_id1) #Create a user to be used as regular member member_actor_obj = IonObject(RT.ActorIdentity, name='org member actor') member_actor_id,_ = self.RR.create(member_actor_obj) assert(member_actor_id) member_actor_header = get_actor_header(member_actor_id) member_user_obj = IonObject(RT.UserInfo, name='org member user') member_user_id,_ = self.RR.create(member_user_obj) assert(member_user_id) self.RR.create_association(subject=member_actor_id, predicate=PRED.hasInfo, object=member_user_id) #Build the Service Agreement Proposal to enroll a user actor sap = IonObject(OT.EnrollmentProposal,consumer=member_actor_id, provider=stuff.org_id ) sap_response = self.org_management_service.negotiate(sap, headers=member_actor_header ) #enroll the member without using negotiation self.org_management_service.enroll_member(org_id=stuff.org_id, actor_id=member_actor_id) #-------------------------------------------------------------------------------- # Get the extended Site (platformSite) #-------------------------------------------------------------------------------- try: extended_site = self.OMS.get_site_extension(stuff.platform_site_id) except: log.error('failed to get extended site', exc_info=True) raise log.debug("extended_site: %r ", extended_site) self.assertEqual(1, len(extended_site.platform_devices)) self.assertEqual(1, len(extended_site.platform_models)) self.assertEqual(stuff.platform_device_id, extended_site.platform_devices[0]._id) self.assertEqual(stuff.platform_model_id, extended_site.platform_models[0]._id) log.debug("verify that PlatformDeviceb is linked to PlatformDevice with hasNetworkParent link") associations = self.RR.find_associations(subject=stuff.platform_deviceb_id, predicate=PRED.hasNetworkParent, object=stuff.platform_device_id, 
id_only=True) self.assertIsNotNone(associations, "PlatformDevice child not connected to PlatformDevice parent.") #-------------------------------------------------------------------------------- # Get the extended Org #-------------------------------------------------------------------------------- #test the extended resource extended_org = self.OMS.get_marine_facility_extension(stuff.org_id) log.debug("test_observatory_org_extended: extended_org: %s ", str(extended_org)) #self.assertEqual(2, len(extended_org.instruments_deployed) ) #self.assertEqual(1, len(extended_org.platforms_not_deployed) ) self.assertEqual(2, extended_org.number_of_platforms) self.assertEqual(2, len(extended_org.platform_models) ) self.assertEqual(2, extended_org.number_of_instruments) self.assertEqual(2, len(extended_org.instrument_models) ) self.assertEqual(1, len(extended_org.members)) self.assertNotEqual(extended_org.members[0]._id, member_actor_id) self.assertEqual(extended_org.members[0]._id, member_user_id) self.assertEqual(1, len(extended_org.open_requests)) self.assertTrue(len(extended_site.deployments)>0) self.assertEqual(len(extended_site.deployments), len(extended_site.deployment_info)) #test the extended resource of the ION org ion_org_id = self.org_management_service.find_org() extended_org = self.OMS.get_marine_facility_extension(ion_org_id._id, user_id=12345) log.debug("test_observatory_org_extended: extended_ION_org: %s ", str(extended_org)) self.assertEqual(1, len(extended_org.members)) self.assertEqual(0, extended_org.number_of_platforms) #self.assertEqual(1, len(extended_org.sites)) #-------------------------------------------------------------------------------- # Get the extended Site #-------------------------------------------------------------------------------- #create device state events to use for op /non-op filtering in extended t = get_ion_ts() self.event_publisher.publish_event( ts_created= t, event_type = 'ResourceAgentStateEvent', origin = stuff.instrument_device_id, state=ResourceAgentState.STREAMING ) self.event_publisher.publish_event( ts_created= t, event_type = 'ResourceAgentStateEvent', origin = stuff.instrument_device2_id, state=ResourceAgentState.INACTIVE ) extended_site = self.OMS.get_site_extension(stuff.instrument_site2_id) log.debug("test_observatory_org_extended: extended_site: %s ", str(extended_site)) self.dpclient.delete_data_product(data_product_id1)
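# A minimal sketch (hypothetical helper, not the service implementation) of the
# hasSite traversal the tests above exercise: find_related_sites reports a
# {parent_id: [child_id, ...]} mapping, and descendants can be collected from it
# with a plain depth-first walk.
def collect_descendants(site_children, root_id):
    """Depth-first walk over a {parent_id: [child_id, ...]} mapping."""
    found, stack = [], [root_id]
    while stack:
        for child in site_children.get(stack.pop(), []):
            found.append(child)
            stack.append(child)
    return found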
class RegistrationProcessTest(IonIntegrationTestCase): def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.dataset_management = DatasetManagementServiceClient() self.data_product_management = DataProductManagementServiceClient() self.pubsub_management = PubsubManagementServiceClient() self.resource_registry = self.container.resource_registry def _make_dataset(self): tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) dataset = Dataset(name='test_dataset') dataset_id = self.dataset_management.create_dataset( dataset, parameter_dictionary_id=parameter_dict_id) return dataset_id @unittest.skip("Array types temporarily unsupported") def test_pydap(self): ph = ParameterHelper(self.dataset_management, self.addCleanup) pdict_id = ph.create_extended_parsed() stream_def_id = self.pubsub_management.create_stream_definition( 'example', parameter_dictionary_id=pdict_id) self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id) dp = DataProduct(name='example') data_product_id = self.data_product_management.create_data_product( dp, stream_def_id) self.addCleanup(self.data_product_management.delete_data_product, data_product_id) self.data_product_management.activate_data_product_persistence( data_product_id) self.addCleanup( self.data_product_management.suspend_data_product_persistence, data_product_id) dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0] monitor = DatasetMonitor(dataset_id) self.addCleanup(monitor.stop) rdt = ph.get_rdt(stream_def_id) ph.fill_rdt(rdt, 10) ph.publish_rdt_to_data_product(data_product_id, rdt) self.assertTrue(monitor.wait()) gevent.sleep( 1) # Yield to other greenlets, had an issue with connectivity pydap_host = CFG.get_safe('server.pydap.host', 'localhost') pydap_port = CFG.get_safe('server.pydap.port', 8001) url = 'http://%s:%s/%s' % (pydap_host, pydap_port, data_product_id) for i in xrange( 3 ): # Do it three times to test that the cache doesn't corrupt the requests/responses ds = open_url(url) np.testing.assert_array_equal(list(ds['data']['time']), np.arange(10)) untested = [] for k, v in rdt.iteritems(): if k == rdt.temporal_parameter: continue context = rdt.context(k) if isinstance(context.param_type, QuantityType): np.testing.assert_array_equal(list(ds['data'][k]), rdt[k]) elif isinstance(context.param_type, ArrayType): if context.param_type.inner_encoding is None: values = np.empty(rdt[k].shape, dtype='O') for i, obj in enumerate(rdt[k]): values[i] = str(obj) np.testing.assert_array_equal(list(ds['data'][k]), values) elif len(rdt[k].shape) > 1: values = np.empty(rdt[k].shape[0], dtype='O') for i in xrange(rdt[k].shape[0]): values[i] = ','.join( map(lambda x: str(x), rdt[k][i].tolist())) elif isinstance(context.param_type, ConstantType): np.testing.assert_array_equal(list(ds['data'][k]), rdt[k]) elif isinstance(context.param_type, CategoryType): np.testing.assert_array_equal(list(ds['data'][k]), rdt[k].astype('|S')) else: untested.append('%s (%s)' % (k, context.param_type)) if untested: raise AssertionError('Untested parameters: %s' % untested)
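# A hedged sketch of probing the same registration endpoint without the pydap
# client. OPeNDAP servers conventionally expose the dataset structure at a .dds
# suffix; whether this particular pydap deployment serves it is an assumption to
# verify. Host/port defaults mirror the CFG keys used in test_pydap above.
import urllib2

def fetch_dds(data_product_id, host='localhost', port=8001):
    """Return the raw DDS (dataset descriptor) text for a registered product."""
    url = 'http://%s:%s/%s.dds' % (host, port, data_product_id)  # .dds suffix assumed
    return urllib2.urlopen(url, timeout=10).read()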
class TestExternalDatasetAgentMgmt(IonIntegrationTestCase): # DataHandler config DVR_CONFIG = {"dvr_mod": "ion.agents.data.handlers.base_data_handler", "dvr_cls": "DummyDataHandler"} def setUp(self): # Start container self._start_container() self.container.start_rel_from_url("res/deploy/r2deploy.yml") log.debug("TestExternalDatasetAgentMgmt: started services") # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) # @unittest.skip('not yet working. fix activate_data_product_persistence()') # @unittest.skip() @unittest.skip("not working") def test_activateDatasetAgent(self): # Create ExternalDatasetModel datasetModel_obj = IonObject( RT.ExternalDatasetModel, name="ExampleDatasetModel", description="ExampleDatasetModel", datset_type="FibSeries", ) try: datasetModel_id = self.damsclient.create_external_dataset_model(datasetModel_obj) except BadRequest as ex: self.fail("failed to create new ExternalDatasetModel: %s" % ex) log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id)) # Create ExternalDatasetAgent datasetAgent_obj = IonObject( RT.ExternalDatasetAgent, name="datasetagent007", description="datasetagent007", handler_module=EDA_MOD, handler_class=EDA_CLS, ) try: datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id) except BadRequest as ex: self.fail("failed to create new ExternalDatasetAgent: %s" % ex) log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id)) # Create ExternalDataset log.debug("TestExternalDatasetAgentMgmt: Create external dataset resource ") extDataset_obj = IonObject(RT.ExternalDataset, name="ExtDataset", description="ExtDataset") try: extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id) except BadRequest as ex: self.fail("failed to create new external dataset resource: %s" % ex) log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s ", str(extDataset_id)) # register the dataset as a data producer dproducer_id = self.damsclient.register_external_data_set(extDataset_id) # create a stream definition for the data from the ctd simulator pdict_id = self.datasetclient.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name="SBE37_CDM", parameter_dictionary_id=pdict_id) log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id)) log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition") tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject( RT.DataProduct, name="DP1", description="some new dp", temporal_domain=tdom, spatial_domain=sdom ) data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id) log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1)) self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1) # todo fix the problem here.... 
self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True) log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids)) stream_id = stream_ids[0] # Build a taxonomy for the dataset tx = TaxyTool() tx.add_taxonomy_set("data", "external_data") # Augment the DVR_CONFIG with the necessary pieces self.DVR_CONFIG["dh_cfg"] = { "TESTING": True, "stream_id": stream_id, # TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members "data_producer_id": dproducer_id, # 'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id "taxonomy": tx.dump(), # TODO: Currently does not support sets "max_records": 4, } # Create agent config. self._stream_config = {} agent_config = { "driver_config": self.DVR_CONFIG, "stream_config": self._stream_config, "agent": {"resource_id": EDA_RESOURCE_ID}, "test_mode": True, } extDatasetAgentInstance_obj = IonObject( RT.ExternalDatasetAgentInstance, name="DatasetAgentInstance", description="DatasetAgentInstance", dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=agent_config, ) extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance( external_dataset_agent_instance=extDatasetAgentInstance_obj, external_dataset_agent_id=datasetAgent_id, external_dataset_id=extDataset_id, ) log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj)) log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id)) # Check that the instance is currently not active id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id) log.debug( "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s active 1 = %s ", str(id), str(active) ) self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id) dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id) log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj)) # now the instance process should be active id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id) log.debug( "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s active 2 = %s ", str(id), str(active) ) # Start a resource agent client to talk with the instrument agent. 
self._dsa_client = ResourceAgentClient(extDataset_id, process=FakeProcess())
log.debug("TestExternalDatasetAgentMgmt: got dataset client %s", str(self._dsa_client))

# TODO: Think about what we really should be testing at this point
# The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
# TODO: Do we also need to show data retrieval?

def assert_current_state(expected_state):
    cmd = AgentCommand(command="get_current_state")
    retval = self._dsa_client.execute_agent(cmd)
    self.assertEqual(retval.result, expected_state)

assert_current_state(InstrumentAgentState.UNINITIALIZED)
# Walk the agent through its state machine, verifying the state after each command
for command, expected_state in [
        ("initialize", InstrumentAgentState.INACTIVE),
        ("go_active", InstrumentAgentState.IDLE),
        ("run", InstrumentAgentState.OBSERVATORY),
        ("pause", InstrumentAgentState.STOPPED),
        ("resume", InstrumentAgentState.OBSERVATORY),
        ("clear", InstrumentAgentState.IDLE),
        ("run", InstrumentAgentState.OBSERVATORY),
        ("pause", InstrumentAgentState.STOPPED),
        ("clear", InstrumentAgentState.IDLE),
        ("run", InstrumentAgentState.OBSERVATORY),
        ("reset", InstrumentAgentState.UNINITIALIZED)]:
    self._dsa_client.execute_agent(AgentCommand(command=command))
    assert_current_state(expected_state)

# -------------------------------
# Deactivate InstrumentAgentInstance
# -------------------------------
self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)

def test_dataset_agent_prepare_support(self):
    eda_sup = self.damsclient.prepare_external_dataset_agent_support()
    eda_obj = IonObject(RT.ExternalDatasetAgent, name="ExternalDatasetAgent")
    eda_id = self.damsclient.create_external_dataset_agent(eda_obj)
    eda_sup = self.damsclient.prepare_external_dataset_agent_support(external_dataset_agent_id=eda_id)
    edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support()
    edai_obj = IonObject(RT.ExternalDatasetAgentInstance, name="ExternalDatasetAgentInstance")
    edai_id = self.damsclient.create_external_dataset_agent_instance(edai_obj)
    edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support(external_dataset_agent_instance_id=edai_id)
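# The try/except-BadRequest/self.fail pattern repeated throughout
# test_activateDatasetAgent could be collapsed into a small helper; a sketch
# (hypothetical name, same semantics as the inline blocks above):
def create_or_fail(testcase, factory, *args, **kwargs):
    """Call factory(...), failing the test with a readable message on BadRequest."""
    try:
        return factory(*args, **kwargs)
    except BadRequest as ex:
        testcase.fail("failed to create resource via %s: %s" % (factory.__name__, ex))

# e.g. datasetModel_id = create_or_fail(self, self.damsclient.create_external_dataset_model, datasetModel_obj)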
def _setup_resources(self): # TODO: some or all of this (or some variation) should move to DAMS' # Build the test resources for the dataset dms_cli = DatasetManagementServiceClient() dams_cli = DataAcquisitionManagementServiceClient() dpms_cli = DataProductManagementServiceClient() rr_cli = ResourceRegistryServiceClient() pubsub_cli = PubsubManagementServiceClient() eda = ExternalDatasetAgent(name='example eda',handler_module=self.DVR_CONFIG['dvr_mod'], handler_class=self.DVR_CONFIG['dvr_cls']) eda_id = dams_cli.create_external_dataset_agent(eda) eda_inst = ExternalDatasetAgentInstance(name='example eda instance') eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id) # Create and register the necessary resources/objects # Create DataProvider dprov = ExternalDataProvider(name='example data provider', institution=Institution(), contact=ContactInformation()) dprov.contact.individual_names_given = 'Christopher Mueller' dprov.contact.email = '*****@*****.**' # Create DataSource dsrc = DataSource(name='example datasource', protocol_type='DAP', institution=Institution(), contact=ContactInformation()) dsrc.connection_params['base_data_url'] = '' dsrc.contact.individual_names_given = 'Tim Giguere' dsrc.contact.email = '*****@*****.**' # Create ExternalDataset ds_name = 'usgs_test_dataset' dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation()) # The usgs.nc test dataset is a download of the R1 dataset found here: # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc' dset.dataset_description.parameters['temporal_dimension'] = 'time' dset.dataset_description.parameters['zonal_dimension'] = 'lon' dset.dataset_description.parameters['meridional_dimension'] = 'lat' dset.dataset_description.parameters['vertical_dimension'] = 'z' dset.dataset_description.parameters['variables'] = [ 'water_temperature', 'streamflow', 'water_temperature_bottom', 'water_temperature_middle', 'specific_conductance', 'data_qualifier', ] # Create DataSourceModel dsrc_model = DataSourceModel(name='dap_model') #dsrc_model.model = 'DAP' dsrc_model.data_handler_module = 'N/A' dsrc_model.data_handler_class = 'N/A' ## Run everything through DAMS ds_id = dams_cli.create_external_dataset(external_dataset=dset) ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov) ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc) ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model) # Register the ExternalDataset dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id) # Or using each method dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id) dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id) dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id) dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id) # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id) #create temp streamdef so the data product can create the stream pc_list = [] #Get 'time' parameter context 
pc_list.append(dms_cli.read_parameter_context_by_name('time', id_only=True)) for pc_k, pc in self._create_parameter_dictionary().iteritems(): pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump())) pdict_id = dms_cli.create_parameter_dictionary('netcdf_param_dict', pc_list) #create temp streamdef so the data product can create the stream streamdef_id = pubsub_cli.create_stream_definition(name="netcdf", description="netcdf", parameter_dictionary_id=pdict_id) tdom, sdom = time_series_domain() tdom, sdom = tdom.dump(), sdom.dump() dprod = IonObject(RT.DataProduct, name='usgs_parsed_product', description='parsed usgs product', temporal_domain=tdom, spatial_domain=sdom) # Generate the data product and associate it to the ExternalDataset dproduct_id = dpms_cli.create_data_product(data_product=dprod, stream_definition_id=streamdef_id) dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id) stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True) stream_id = stream_id[0] log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id})) # Create the logger for receiving publications _, stream_route, _ = self.create_stream_and_logger(name='usgs', stream_id=stream_id) self.EDA_RESOURCE_ID = ds_id self.EDA_NAME = ds_name self.DVR_CONFIG['dh_cfg'] = { 'TESTING': True, 'stream_id': stream_id, 'stream_route': stream_route, 'stream_def': streamdef_id, 'data_producer_id': dproducer_id, # CBM: Should this be put in the main body of the config - with mod & cls? 'max_records': 1, }
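# With the resources wired up by _setup_resources, a granule could be pushed
# onto the logger stream using the same publisher pattern as the
# transform-worker tests later in this file. A sketch (hypothetical helper
# name; assumes 'time' is the temporal parameter of the dictionary built
# above, and the RecordDictionaryTool/StandaloneStreamPublisher imports those
# tests use):
def publish_test_granule(stream_id, stream_route, streamdef_id):
    """Publish a single one-record granule onto the given stream (sketch)."""
    publisher = StandaloneStreamPublisher(stream_id=stream_id, stream_route=stream_route)
    rdt = RecordDictionaryTool(stream_definition_id=streamdef_id)
    rdt['time'] = [0]  # time should always come first
    publisher.publish(rdt.to_granule())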
class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.dataset_management = self.datasetclient self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node) def test_createDataProcess(self): #--------------------------------------------------------------------------- # Data Process Definition #--------------------------------------------------------------------------- dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) # Make assertion on the newly registered data process definition data_process_definition = self.rrclient.read(dprocdef_id) self.assertEquals(data_process_definition.name, 'ctd_L0_all') self.assertEquals(data_process_definition.description, 'transform ctd package into three separate L0 streams') self.assertEquals(data_process_definition.module, 'ion.processes.data.transforms.ctd.ctd_L0_all') self.assertEquals(data_process_definition.class_name, 'ctd_L0_all') # Read the data process definition using data process management and make assertions dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id) self.assertEquals(dprocdef_obj.class_name,'ctd_L0_all') self.assertEquals(dprocdef_obj.module,'ion.processes.data.transforms.ctd.ctd_L0_all') #--------------------------------------------------------------------------- # Create an input instrument #--------------------------------------------------------------------------- instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product') instrument_id, rev = self.rrclient.create(instrument_obj) # Register the instrument so that the data producer and stream object are created data_producer_id = self.damsclient.register_instrument(instrument_id) # create a stream definition for the data from the ctd simulator pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id ) # Assert that the link between the stream definition and the data process definition was done assocs = self.rrclient.find_associations(subject=dprocdef_id, predicate=PRED.hasInputStreamDefinition, object=ctd_stream_def_id, id_only=True) self.assertIsNotNone(assocs) 
#--------------------------------------------------------------------------- # Input Data Product #--------------------------------------------------------------------------- tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() input_dp_obj = IonObject( RT.DataProduct, name='InputDataProduct', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=ctd_stream_def_id, exchange_point='test') #Make assertions on the input data product created input_dp_obj = self.rrclient.read(input_dp_id) self.assertEquals(input_dp_obj.name, 'InputDataProduct') self.assertEquals(input_dp_obj.description, 'some new dp') self.damsclient.assign_data_product(instrument_id, input_dp_id) # Retrieve the stream via the DataProduct->Stream associations stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True) self.in_stream_id = stream_ids[0] #--------------------------------------------------------------------------- # Output Data Product #--------------------------------------------------------------------------- outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(name='conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id,binding='conductivity' ) outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(name='pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id, binding='pressure' ) outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(name='temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id, binding='temperature' ) self.output_products={} output_dp_obj = IonObject(RT.DataProduct, name='conductivity', description='transform output conductivity', temporal_domain = tdom, spatial_domain = sdom) output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id) self.output_products['conductivity'] = output_dp_id_1 output_dp_obj = IonObject(RT.DataProduct, name='pressure', description='transform output pressure', temporal_domain = tdom, spatial_domain = sdom) output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id) self.output_products['pressure'] = output_dp_id_2 output_dp_obj = IonObject(RT.DataProduct, name='temperature', description='transform output ', temporal_domain = tdom, spatial_domain = sdom) output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id) self.output_products['temperature'] = output_dp_id_3 #--------------------------------------------------------------------------- # Create the data process #--------------------------------------------------------------------------- def _create_data_process(): dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products) return dproc_id dproc_id = _create_data_process() # Make assertions on the data process created data_process = self.dataprocessclient.read_data_process(dproc_id) # Assert that the data process has a process id attached self.assertIsNotNone(data_process.process_id) # Assert that the data process got the input data 
product's subscription id attached as its own input_subscription_id attribute
self.assertIsNotNone(data_process.input_subscription_id)
output_data_product_ids = self.rrclient.find_objects(subject=dproc_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=True)
self.assertEquals(Set(output_data_product_ids[0]), Set([output_dp_id_1, output_dp_id_2, output_dp_id_3]))

@patch.dict(CFG, {'endpoint': {'receive': {'timeout': 60}}})
def test_createDataProcessUsingSim(self):
    #-------------------------------
    # Create InstrumentModel
    #-------------------------------
    instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel")
    instModel_id = self.imsclient.create_instrument_model(instModel_obj)
    #-------------------------------
    # Create InstrumentAgent
    #-------------------------------
    instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent",
                              driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg")
    instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
    self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)
    #-------------------------------
    # Create InstrumentDevice
    #-------------------------------
    instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345")
    instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
    self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
    #-------------------------------
    # Create InstrumentAgentInstance to hold configuration information
    #-------------------------------
    port_agent_config = {
        'device_addr': CFG.device.sbe37.host,
        'device_port': CFG.device.sbe37.port,
        'process_type': PortAgentProcessType.UNIX,
        'binary_path': "port_agent",
        'port_agent_addr': 'localhost',
        'command_port': CFG.device.sbe37.port_agent_cmd_port,
        'data_port': CFG.device.sbe37.port_agent_data_port,
        'log_level': 5,
        'type': PortAgentType.ETHERNET
    }
    instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                      description="SBE37IMAgentInstance", port_agent_config=port_agent_config)
    instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)
    #-------------------------------
    # Create CTD Parsed as the first data product
    #-------------------------------
    # create a stream definition for the data from the ctd simulator
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)
    # Construct temporal and spatial Coordinate Reference System objects
    tdom, sdom = time_series_domain()
    sdom = sdom.dump()
    tdom = tdom.dump()
    dp_obj = IonObject(RT.DataProduct, name='ctd_parsed', description='ctd stream test',
                       temporal_domain=tdom, spatial_domain=sdom)
    ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)
    self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)
    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ =
self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True) #------------------------------- # Create CTD Raw as the second data product #------------------------------- raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id) dp_obj.name = 'ctd_raw' ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True) #------------------------------- # L0 Conductivity - Temperature - Pressure: Data Process Definition #------------------------------- dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) #------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' ) outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' ) outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' ) self.output_products={} ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L0_Conductivity', description='transform output conductivity', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id) self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct, name='L0_Pressure', description='transform output pressure', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id) self.output_products['pressure'] = ctd_l0_pressure_output_dp_id ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct, name='L0_Temperature', description='transform output temperature', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id) self.output_products['temperature'] = ctd_l0_temperature_output_dp_id #------------------------------- # Create listener for data process events and verify that events are received. 
#-------------------------------
# todo: add this validate for Req: L4-CI-SA-RQ-367 Data processing shall notify registered data product consumers about data processing workflow life cycle events
# todo (contd): I believe the capability does not exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
#-------------------------------
# L0 Conductivity - Temperature - Pressure: Create the data process
#-------------------------------
ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
data_process = self.rrclient.read(ctd_l0_all_data_process_id)
process_id = data_process.process_id
self.addCleanup(self.process_dispatcher.cancel_process, process_id)
#-------------------------------
# Wait until the process launched in the create_data_process() method is actually running, before proceeding further in this test
#-------------------------------
gate = ProcessStateGate(self.process_dispatcher.read_process, process_id, ProcessStateEnum.RUNNING)
self.assertTrue(gate.await(30), "The data process (%s) did not spawn in 30 seconds" % process_id)
#-------------------------------
# Retrieve a list of all data process definitions in RR and validate that the DPD is listed
#-------------------------------
# todo: Req: L4-CI-SA-RQ-366 Data processing shall manage data topic definitions
# todo: data topics are being handled by pub sub at the level of streams
self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
# todo: check that activate event is received L4-CI-SA-RQ-367
# todo: (it looks like no event is being published when the data process is activated,
# todo:  so below we just check that the subscription is indeed activated) (Swarbhanu)
# todo: monitor process to see if it is active (sa-rq-182)
ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
input_subscription_id = ctd_l0_all_data_process.input_subscription_id
subs = self.rrclient.read(input_subscription_id)
self.assertTrue(subs.activated)
# todo: This has not yet been completed by CEI, will probably surface through a DPMS call
self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)
#-------------------------------
# Retrieve the extended resources for data process definition and for data process
#-------------------------------
extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
self.assertEqual(1, len(extended_process_definition.data_processes))
log.debug("test_createDataProcess: extended_process_definition %s", str(extended_process_definition))
extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
self.assertEqual(1, len(extended_process.input_data_products))
log.debug("test_createDataProcess: extended_process %s", str(extended_process))

################################ Test the removal of data processes ##################################

#-------------------------------------------------------------------
# Test the deleting of the data process
#-------------------------------------------------------------------
self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)
# Check that the data process got removed. Check the lcs state: it should be retired
dp_obj = self.rrclient.read(ctd_l0_all_data_process_id)
self.assertEquals(dp_obj.lcstate, LCS.RETIRED)
# Check for process defs still attached to the data process
dpd_assn_ids = self.rrclient.find_associations(subject=ctd_l0_all_data_process_id, predicate=PRED.hasProcessDefinition, id_only=True)
self.assertEquals(len(dpd_assn_ids), 0)
# Check for output data products still attached to the data process
out_products, assocs = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
self.assertEquals(len(out_products), 0)
self.assertEquals(len(assocs), 0)
# Check for input data products still attached to the data process
inprod_associations = self.rrclient.find_associations(ctd_l0_all_data_process_id, PRED.hasInputProduct)
self.assertEquals(len(inprod_associations), 0)
# Check that the data process has been deactivated
self.assertIsNone(dp_obj.input_subscription_id)
# Read the original subscription id of the data process and check that it has been deactivated
with self.assertRaises(NotFound):
    self.pubsubclient.read_subscription(input_subscription_id)
#-------------------------------------------------------------------
# Delete the data process definition
#-------------------------------------------------------------------
# before deleting, get the associated process definition so we can later check that it gets deleted as it should
proc_def_ids, proc_def_asocs = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasProcessDefinition)
self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)
# check that the data process definition has been retired
dp_proc_def = self.rrclient.read(ctd_L0_all_dprocdef_id)
self.assertEquals(dp_proc_def.lcstate, LCS.RETIRED)
# Check for old associations of this data process definition
proc_defs, proc_def_asocs = self.rrclient.find_objects(ctd_L0_all_dprocdef_id, PRED.hasProcessDefinition)
self.assertEquals(len(proc_defs), 0)
# find all associations where this is the subject
_, obj_assns = self.rrclient.find_objects(subject=ctd_L0_all_dprocdef_id, id_only=True)
self.assertEquals(len(obj_assns), 0)

################################ Test the removal of data processes ##################################
# Try force delete... This should simply delete the associations and the data process object
# from the resource registry
#---------------------------------------------------------------------------------------------------------------
# Force deleting a data process
#---------------------------------------------------------------------------------------------------------------
self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)
# find all associations where this is the subject
_, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)
# find all associations where this is the object
_, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)
self.assertEquals(len(obj_assns), 0)
self.assertEquals(len(sbj_assns), 0)
with self.assertRaises(NotFound):
    self.rrclient.read(ctd_l0_all_data_process_id)
#---------------------------------------------------------------------------------------------------------------
# Force deleting a data process definition
#---------------------------------------------------------------------------------------------------------------
self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)
# find all associations where the definition (not the already-removed data process) is the subject
_, obj_assns = self.rrclient.find_objects(subject=ctd_L0_all_dprocdef_id, id_only=True)
# find all associations where this is the object
_, sbj_assns = self.rrclient.find_subjects(object=ctd_L0_all_dprocdef_id, id_only=True)
self.assertEquals(len(obj_assns), 0)
self.assertEquals(len(sbj_assns), 0)
with self.assertRaises(NotFound):
    self.rrclient.read(ctd_L0_all_dprocdef_id)
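# The force-delete verification above repeats a three-part pattern (no
# associations as subject, none as object, and read raises NotFound); a sketch
# of it as a reusable assertion helper (hypothetical name, same resource
# registry calls as the test):
def assert_fully_removed(testcase, resource_registry, resource_id):
    """Assert a force-deleted resource left no associations and cannot be read."""
    _, obj_assns = resource_registry.find_objects(subject=resource_id, id_only=True)
    _, sbj_assns = resource_registry.find_subjects(object=resource_id, id_only=True)
    testcase.assertEquals(len(obj_assns), 0)
    testcase.assertEquals(len(sbj_assns), 0)
    with testcase.assertRaises(NotFound):
        resource_registry.read(resource_id)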
class TestTransformWorkerSubscriptions(IonIntegrationTestCase): def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.dataset_management_client = DatasetManagementServiceClient( node=self.container.node) self.pubsub_client = PubsubManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10) @attr('LOCOINT') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') def test_multi_subscriptions(self): self.dp_list = [] self.event1_verified = Event() self.event2_verified = Event() self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name( name='ctd_parsed_param_dict', id_only=True) # create the StreamDefinition self.stream_def_id = self.pubsub_client.create_stream_definition( name='stream_def', parameter_dictionary_id=self.parameter_dict_id) self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id) # create the DataProduct input_dp_obj = IonObject(RT.DataProduct, name='input_data_product_one', description='input test stream one') self.input_dp_one_id = self.dataproductclient.create_data_product( data_product=input_dp_obj, stream_definition_id=self.stream_def_id) input_dp_obj = IonObject(RT.DataProduct, name='input_data_product_two', description='input test stream two') self.input_dp_two_id = self.dataproductclient.create_data_product( data_product=input_dp_obj, stream_definition_id=self.stream_def_id) #retrieve the Stream for this data product stream_ids, assoc_ids = self.rrclient.find_objects( self.input_dp_one_id, PRED.hasStream, RT.Stream, True) self.stream_one_id = stream_ids[0] stream_ids, assoc_ids = self.rrclient.find_objects( self.input_dp_two_id, PRED.hasStream, RT.Stream, True) self.stream_two_id = stream_ids[0] dpd_id = self.create_data_process_definition() dp1_func_output_dp_id, dp2_func_output_dp_id = self.create_output_data_products( ) first_dp_id = self.create_data_process_one(dpd_id, dp1_func_output_dp_id) second_dp_id = self.create_data_process_two(dpd_id, self.input_dp_two_id, dp2_func_output_dp_id) #retrieve subscription from data process subscription_objs, _ = self.rrclient.find_objects( subject=first_dp_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False) log.debug('test_transform_worker subscription_obj: %s', subscription_objs[0]) #create subscription to stream ONE, create data process and publish granule on stream ONE #create a queue to catch the published granules of stream ONE self.subscription_one_id = self.pubsub_client.create_subscription( name='parsed_subscription_one', stream_ids=[self.stream_one_id], exchange_name=subscription_objs[0].exchange_name) self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_one_id) self.pubsub_client.activate_subscription(self.subscription_one_id) self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_one_id) stream_route_one = self.pubsub_client.read_stream_route( self.stream_one_id) self.publisher_one = StandaloneStreamPublisher( stream_id=self.stream_one_id, 
stream_route=stream_route_one) self.start_event_listener() #data process 1 adds conductivity + pressure and puts the result in salinity rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id) rdt['time'] = [0] # time should always come first rdt['conductivity'] = [1] rdt['pressure'] = [2] rdt['salinity'] = [8] self.publisher_one.publish(msg=rdt.to_granule(), stream_id=self.stream_one_id) #retrieve subscription from data process subscription_objs, _ = self.rrclient.find_objects( subject=second_dp_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False) log.debug('test_transform_worker subscription_obj: %s', subscription_objs[0]) #create subscription to stream ONE and TWO, move TW subscription, create data process and publish granule on stream TWO #create a queue to catch the published granules of stream TWO self.subscription_two_id = self.pubsub_client.create_subscription( name='parsed_subscription_one_two', stream_ids=[self.stream_two_id], exchange_name=subscription_objs[0].exchange_name) self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_two_id) self.pubsub_client.activate_subscription(self.subscription_two_id) self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_two_id) stream_route_two = self.pubsub_client.read_stream_route( self.stream_two_id) self.publisher_two = StandaloneStreamPublisher( stream_id=self.stream_two_id, stream_route=stream_route_two) #data process 1 adds conductivity + pressure and puts the result in salinity rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id) rdt['time'] = [0] # time should always come first rdt['conductivity'] = [1] rdt['pressure'] = [2] rdt['salinity'] = [8] self.publisher_one.publish(msg=rdt.to_granule(), stream_id=self.stream_one_id) #data process 2 adds salinity + pressure and puts the result in conductivity rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id) rdt['time'] = [0] # time should always come first rdt['conductivity'] = [22] rdt['pressure'] = [4] rdt['salinity'] = [1] self.publisher_two.publish(msg=rdt.to_granule(), stream_id=self.stream_two_id) self.assertTrue(self.event2_verified.wait(self.wait_time)) self.assertTrue(self.event1_verified.wait(self.wait_time)) @attr('LOCOINT') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') def test_two_transforms_inline(self): self.dp_list = [] self.event1_verified = Event() self.event2_verified = Event() self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name( name='ctd_parsed_param_dict', id_only=True) # create the StreamDefinition self.stream_def_id = self.pubsub_client.create_stream_definition( name='stream_def', parameter_dictionary_id=self.parameter_dict_id) self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id) # create the DataProduct input_dp_obj = IonObject(RT.DataProduct, name='input_data_product_one', description='input test stream one') self.input_dp_one_id = self.dataproductclient.create_data_product( data_product=input_dp_obj, stream_definition_id=self.stream_def_id) dpd_id = self.create_data_process_definition() dp1_func_output_dp_id, dp2_func_output_dp_id = self.create_output_data_products( ) first_dp_id = self.create_data_process_one(dpd_id, dp1_func_output_dp_id) second_dp_id = self.create_data_process_two(dpd_id, dp1_func_output_dp_id, dp2_func_output_dp_id) #retrieve subscription from data process one subscription_objs, _ = self.rrclient.find_objects( 
subject=first_dp_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
log.debug('test_transform_worker subscription_obj: %s', subscription_objs[0])
# retrieve the Streams for these data products
stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_one_id, PRED.hasStream, RT.Stream, True)
self.stream_one_id = stream_ids[0]
# the input to data process two is the output from data process one
stream_ids, assoc_ids = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, RT.Stream, True)
self.stream_two_id = stream_ids[0]
# Run provenance on the output dataproduct of the second data process to see that all the links are as expected
output_data_product_provenance = self.dataproductclient.get_data_product_provenance(dp2_func_output_dp_id)
# Do a basic check that there are 3 entries in the provenance graph: the input product, the intermediate product and the final output.
self.assertTrue(len(output_data_product_provenance) == 3)
# confirm that the linking from the output dataproduct to the input dataproduct is correct
self.assertTrue(dp1_func_output_dp_id in output_data_product_provenance[dp2_func_output_dp_id]['parents'])
self.assertTrue(self.input_dp_one_id in output_data_product_provenance[dp1_func_output_dp_id]['parents'])
# create subscription to streams ONE and TWO, create data process and publish granule on stream ONE
# create a queue to catch the published granules of stream ONE
subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_one_id, self.stream_two_id], exchange_name=subscription_objs[0].exchange_name)
self.addCleanup(self.pubsub_client.delete_subscription, subscription_id)
self.pubsub_client.activate_subscription(subscription_id)
self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id)
stream_route_one = self.pubsub_client.read_stream_route(self.stream_one_id)
self.publisher_one = StandaloneStreamPublisher(stream_id=self.stream_one_id, stream_route=stream_route_one)
# retrieve subscription from data process
subscription_objs, _ = self.rrclient.find_objects(subject=second_dp_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
log.debug('test_transform_worker subscription_obj: %s', subscription_objs[0])
# data process 1 adds conductivity + pressure and puts the result in salinity
# data process 2 adds salinity + pressure and puts the result in conductivity
self.start_event_listener()
rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
rdt['time'] = [0]  # time should always come first
rdt['conductivity'] = [1]
rdt['pressure'] = [2]
rdt['salinity'] = [8]
self.publisher_one.publish(msg=rdt.to_granule(), stream_id=self.stream_one_id)
self.assertTrue(self.event2_verified.wait(self.wait_time))
self.assertTrue(self.event1_verified.wait(self.wait_time))

def create_data_process_definition(self):
    # two data processes using one transform and one DPD
    # Set up DPD and DP #2 - array add function
    tf_obj = IonObject(RT.TransformFunction,
                       name='add_array_func',
                       description='adds values in an array',
                       function='add_arrays',
                       module="ion_example.add_arrays",
                       arguments=['arr1', 'arr2'],
                       function_type=TransformFunctionType.TRANSFORM,
                       uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg')
    add_array_func_id, rev = self.rrclient.create(tf_obj)
    dpd_obj = IonObject(RT.DataProcessDefinition,
                        name='add_arrays',
                        description='adds the values of two arrays',
                        data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS)
    add_array_dpd_id =
self.dataprocessclient.create_data_process_definition( data_process_definition=dpd_obj, function_id=add_array_func_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( self.stream_def_id, add_array_dpd_id, binding='add_array_func') return add_array_dpd_id def create_data_process_one(self, data_process_definition_id, output_dataproduct): # Create the data process #data process 1 adds conductivity + pressure and puts the result in salinity argument_map = {"arr1": "conductivity", "arr2": "pressure"} output_param = "salinity" dp1_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=data_process_definition_id, inputs=[self.input_dp_one_id], outputs=[output_dataproduct], argument_map=argument_map, out_param_name=output_param) self.damsclient.register_process(dp1_data_process_id) self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id) self.dp_list.append(dp1_data_process_id) return dp1_data_process_id def create_data_process_two(self, data_process_definition_id, input_dataproduct, output_dataproduct): # Create the data process #data process 2 adds salinity + pressure and puts the result in conductivity argument_map = {'arr1': 'salinity', 'arr2': 'pressure'} output_param = 'conductivity' dp2_func_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=data_process_definition_id, inputs=[input_dataproduct], outputs=[output_dataproduct], argument_map=argument_map, out_param_name=output_param) self.damsclient.register_process(dp2_func_data_process_id) self.addCleanup(self.dataprocessclient.delete_data_process, dp2_func_data_process_id) self.dp_list.append(dp2_func_data_process_id) return dp2_func_data_process_id def create_output_data_products(self): dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition( name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id) dp1_output_dp_obj = IonObject(RT.DataProduct, name='data_process1_data_product', description='output of add array func') dp1_func_output_dp_id = self.dataproductclient.create_data_product( dp1_output_dp_obj, dp1_outgoing_stream_id) self.addCleanup(self.dataproductclient.delete_data_product, dp1_func_output_dp_id) # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, None, True) self._output_stream_one_id = stream_ids[0] dp2_func_outgoing_stream_id = self.pubsub_client.create_stream_definition( name='dp2_stream', parameter_dictionary_id=self.parameter_dict_id) dp2_func_output_dp_obj = IonObject( RT.DataProduct, name='data_process2_data_product', description='output of add array func') dp2_func_output_dp_id = self.dataproductclient.create_data_product( dp2_func_output_dp_obj, dp2_func_outgoing_stream_id) self.addCleanup(self.dataproductclient.delete_data_product, dp2_func_output_dp_id) # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects(dp2_func_output_dp_id, PRED.hasStream, None, True) self._output_stream_two_id = stream_ids[0] subscription_id = self.pubsub_client.create_subscription( 'validator', data_product_ids=[dp1_func_output_dp_id, dp2_func_output_dp_id]) self.addCleanup(self.pubsub_client.delete_subscription, subscription_id) def on_granule(msg, route, stream_id): log.debug('recv_packet stream_id: %s route: %s msg: %s', stream_id, route, msg) self.validate_output_granule(msg, route, stream_id) 
validator = StandaloneStreamSubscriber('validator', callback=on_granule) validator.start() self.addCleanup(validator.stop) self.pubsub_client.activate_subscription(subscription_id) self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id) return dp1_func_output_dp_id, dp2_func_output_dp_id def validate_event(self, *args, **kwargs): """ This method is a callback function for receiving DataProcessStatusEvent. """ data_process_event = args[0] log.debug("DataProcessStatusEvent: %s", str(data_process_event.__dict__)) #if data process already created, check origin if not 'data process assigned to transform worker' in data_process_event.description: self.assertIn(data_process_event.origin, self.dp_list) def validate_output_granule(self, msg, route, stream_id): self.assertTrue( stream_id in [self._output_stream_one_id, self._output_stream_two_id]) rdt = RecordDictionaryTool.load_from_granule(msg) log.debug('validate_output_granule stream_id: %s', stream_id) if stream_id == self._output_stream_one_id: sal_val = rdt['salinity'] log.debug('validate_output_granule sal_val: %s', sal_val) np.testing.assert_array_equal(sal_val, np.array([3])) self.event1_verified.set() else: cond_val = rdt['conductivity'] log.debug('validate_output_granule cond_val: %s', cond_val) np.testing.assert_array_equal(cond_val, np.array([5])) self.event2_verified.set() def start_event_listener(self): es = EventSubscriber(event_type=OT.DataProcessStatusEvent, callback=self.validate_event) es.start() self.addCleanup(es.stop)
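# For reference: these tests install the add_arrays transform from the
# ion_example egg rather than defining it here. Its exact body is not in this
# file, but TestTransformWorker below asserts the inspected source contains
# 'def add_arrays(a, b)', and the expected granules (conductivity 1 + pressure
# 2 -> salinity 3; salinity 1 + pressure 4 -> conductivity 5) imply an
# element-wise sum, so it is presumably equivalent to:
def add_arrays(a, b):
    """Element-wise sum of two array arguments (numpy broadcasting applies)."""
    return a + b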
class TestTransformWorker(IonIntegrationTestCase): def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Instantiate a process to represent the test process = TransformWorkerTestProcess() self.dataset_management_client = DatasetManagementServiceClient( node=self.container.node) self.pubsub_client = PubsubManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceProcessClient( node=self.container.node, process=process) self.time_dom, self.spatial_dom = time_series_domain() self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup) self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10) def push_granule(self, data_product_id): ''' Publishes and monitors that the granule arrived ''' datasets, _ = self.rrclient.find_objects(data_product_id, PRED.hasDataset, id_only=True) dataset_monitor = DatasetMonitor(datasets[0]) rdt = self.ph.rdt_for_data_product(data_product_id) self.ph.fill_parsed_rdt(rdt) self.ph.publish_rdt_to_data_product(data_product_id, rdt) assert dataset_monitor.wait() dataset_monitor.stop() @attr('LOCOINT') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') def test_transform_worker(self): # test that a data process (type: data-product-in / data-product-out) can be defined and launched. 
# verify that the output granule fields are correctly populated
# test that the input and output data products are linked to facilitate provenance
self.dp_list = []
self.data_process_objs = []
self._output_stream_ids = []
self.granule_verified = Event()
self.worker_assigned_event_verified = Event()
self.dp_created_event_verified = Event()
self.heartbeat_event_verified = Event()
self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)
# create the StreamDefinition
self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)
# create the DataProduct that is the input to the data processes
input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream')
self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)
# retrieve the Stream for this data product
stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
self.stream_id = stream_ids[0]
self.start_event_listener()
# create the DPD, DataProcess and output DataProduct
dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
self.dp_list.append(dataprocess_id)
# validate that the repository for data product algorithms persists the new resources NEW SA-1
# the create_data_process call created one of each
dpd_ids, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, id_only=False)
# there will be more than one because of the DPDs that represent the PFs in the data product above
self.assertTrue(dpd_ids is not None)
dp_ids, _ = self.rrclient.find_resources(restype=RT.DataProcess, id_only=False)
# only one DP because the PFs in the data product above are not activated yet
self.assertEquals(len(dp_ids), 1)
# validate the name and version label NEW SA - 2
dataprocessdef_obj = self.dataprocessclient.read_data_process_definition(dataprocessdef_id)
self.assertEqual(dataprocessdef_obj.version_label, '1.0a')
self.assertEqual(dataprocessdef_obj.name, 'add_arrays')
# validate that the DPD has an attachment NEW SA - 21
attachment_ids, assoc_ids = self.rrclient.find_objects(dataprocessdef_id, PRED.hasAttachment, RT.Attachment, True)
self.assertEqual(len(attachment_ids), 1)
attachment_obj = self.rrclient.read_attachment(attachment_ids[0])
log.debug('attachment: %s', attachment_obj)
# validate that the data process resource has input and output data products associated
# L4-CI-SA-RQ-364 and NEW SA-3
outproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasOutputProduct, RT.DataProduct, True)
self.assertEqual(len(outproduct_ids), 1)
inproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasInputProduct, RT.DataProduct, True)
self.assertEqual(len(inproduct_ids), 1)
# Test for provenance. Get the data product produced by the data process
output_data_product_id, _ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True)
output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])
# Do a basic check that there are 2 entries in the provenance graph: the parent and the child.
        self.assertTrue(len(output_data_product_provenance) == 2)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(output_data_product_provenance[output_data_product_id[0]]['parents'][self.input_dp_id]['data_process_definition_id'] == dataprocessdef_id)

        # NEW SA - 4 | Data processing shall include the appropriate data product algorithm name and version number in
        # the metadata of each output data product created by the data product algorithm.
        output_data_product_obj, _ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=False)
        self.assertTrue(output_data_product_obj[0].name is not None)
        self.assertTrue(output_data_product_obj[0]._rev is not None)

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_transform_worker subscription_obj: %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route)

        for n in range(1, 101):
            rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
            rdt['time'] = [0]  # time should always come first
            rdt['conductivity'] = [1]
            rdt['pressure'] = [2]
            rdt['salinity'] = [8]

            self.publisher.publish(rdt.to_granule())

        # validate that the output granule is received and the updated value is correct
        self.assertTrue(self.granule_verified.wait(self.wait_time))

        # validate that the data process loaded into worker event is received (L4-CI-SA-RQ-182)
        self.assertTrue(self.worker_assigned_event_verified.wait(self.wait_time))

        # validate that the data process create (with data product ids) event is received (NEW SA - 42)
        self.assertTrue(self.dp_created_event_verified.wait(self.wait_time))

        # validate that the data process heartbeat event is received (for every hundred granules processed) (L4-CI-SA-RQ-182)
        # this takes a while, so set the wait limit to a large value
        self.assertTrue(self.heartbeat_event_verified.wait(200))

        # validate that the code from the transform function can be retrieved via inspect_data_process_definition
        src = self.dataprocessclient.inspect_data_process_definition(dataprocessdef_id)
        self.assertIn('def add_arrays(a, b)', src)

        # now delete the DPD and DP, then verify that the resources are retired so that the information required for provenance is still available
        self.dataprocessclient.delete_data_process(dataprocess_id)
        self.dataprocessclient.delete_data_process_definition(dataprocessdef_id)

        in_dp_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasInputProduct, object_type=RT.DataProduct, id_only=True)
        self.assertTrue(in_dp_objs is not None)

        dpd_objs, _ = self.rrclient.find_subjects(subject_type=RT.DataProcessDefinition, predicate=PRED.hasDataProcess, object=dataprocess_id, id_only=True)
        self.assertTrue(dpd_objs is not None)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_instrumentdevice(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated
        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]
        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes
        instDevice_obj, _ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='test_ctd_device')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice, name='test_ctd_device', description="test_ctd_device", serial_number="12345")
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get the data product produced by the data process
        output_data_product_id, _ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True)
        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check to see that there are 3 entries in the provenance graph: the parent, the child, and the
        # DataProcessDefinition creating the child from the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(instDevice_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[instDevice_id]['type'] == 'InstrumentDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_platformdevice(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
# verify that the output granule fields are correctly populated # test that the input and output data products are linked to facilitate provenance self.data_process_objs = [] self._output_stream_ids = [] self.event_verified = Event() # Create CTD Parsed as the initial data product # create a stream definition for the data from the ctd simulator self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) self.stream_def_id = self.pubsub_client.create_stream_definition( name='stream_def', parameter_dictionary_id=self.parameter_dict_id) # create the DataProduct that is the input to the data processes input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream') self.input_dp_id = self.dataproductclient.create_data_product( data_product=input_dp_obj, stream_definition_id=self.stream_def_id) # retrieve the Stream for this data product stream_ids, assoc_ids = self.rrclient.find_objects( self.input_dp_id, PRED.hasStream, RT.Stream, True) self.stream_id = stream_ids[0] log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id) # only ever need one device for testing purposes. platform_device_obj, _ = self.rrclient.find_resources( restype=RT.PlatformDevice, name='TestPlatform') if platform_device_obj: platform_device_id = platform_device_obj[0]._id else: platform_device_obj = IonObject(RT.PlatformDevice, name='TestPlatform', description="TestPlatform", serial_number="12345") platform_device_id = self.imsclient.create_platform_device( platform_device=platform_device_obj) self.damsclient.assign_data_product( input_resource_id=platform_device_id, data_product_id=self.input_dp_id) # create the DPD, DataProcess and output DataProduct dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process( ) self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id) self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id) # Test for provenance. Get Data product produced by the data processes output_data_product_id, _ = self.rrclient.find_objects( subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True) output_data_product_provenance = self.dataproductclient.get_data_product_provenance( output_data_product_id[0]) # Do a basic check to see if there were 3 entries in the provenance graph. Parent and Child and the # DataProcessDefinition creating the child from the parent. self.assertTrue(len(output_data_product_provenance) == 3) self.assertTrue(self.input_dp_id in output_data_product_provenance[ output_data_product_id[0]]['parents']) self.assertTrue(platform_device_id in output_data_product_provenance[ self.input_dp_id]['parents']) self.assertTrue(output_data_product_provenance[platform_device_id] ['type'] == 'PlatformDevice') @attr('LOCOINT') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') def test_event_transform_worker(self): self.data_process_objs = [] self._output_stream_ids = [] self.event_verified = Event() # test that a data process (type: data-product-in / event-out) can be defined and launched. 
        # verify that event fields are correctly populated

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        # create the DPD and two DPs
        self.event_data_process_id = self.create_event_data_processes()

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=self.event_data_process_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_event_transform_worker subscription_obj: %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route)

        self.start_event_transform_listener()

        self.data_modified = Event()

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = [0]  # time should always come first
        rdt['conductivity'] = [1]
        rdt['pressure'] = [2]
        rdt['salinity'] = [8]

        self.publisher.publish(rdt.to_granule())

        self.assertTrue(self.event_verified.wait(self.wait_time))

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_bad_argument_map(self):
        self._output_stream_ids = []

        # test that a data process (type: data-product-in / data-product-out) parameter mapping is validated during
        # data process creation and that the correct exception is raised for both input and output.
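        # For context, a minimal sketch of the validation this test expects from
        # create_data_process (illustrative only -- the real check lives inside
        # DataProcessManagementService; the names below are assumptions):
        #
        #     params = in_stream_def.parameter_dictionary.keys()
        #     if not set(argument_map.values()).issubset(params):
        #         raise BadRequest("Input data product does not contain the parameters defined in argument map")
        #     if out_param_name not in out_stream_def.parameter_dictionary.keys():
        #         raise BadRequest("Output data product does not contain the output parameter name provided")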
self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name( name='ctd_parsed_param_dict', id_only=True) # create the StreamDefinition self.stream_def_id = self.pubsub_client.create_stream_definition( name='stream_def', parameter_dictionary_id=self.parameter_dict_id) self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id) # create the DataProduct that is the input to the data processes input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream') self.input_dp_id = self.dataproductclient.create_data_product( data_product=input_dp_obj, stream_definition_id=self.stream_def_id) # two data processes using one transform and one DPD dp1_func_output_dp_id = self.create_output_data_product() # Set up DPD and DP #2 - array add function tf_obj = IonObject( RT.TransformFunction, name='add_array_func', description='adds values in an array', function='add_arrays', module="ion_example.add_arrays", arguments=['arr1', 'arr2'], function_type=TransformFunctionType.TRANSFORM, uri= 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg' ) add_array_func_id, rev = self.rrclient.create(tf_obj) dpd_obj = IonObject( RT.DataProcessDefinition, name='add_arrays', description='adds the values of two arrays', data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS) add_array_dpd_id = self.dataprocessclient.create_data_process_definition( data_process_definition=dpd_obj, function_id=add_array_func_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( self.stream_def_id, add_array_dpd_id, binding='add_array_func') # create the data process with invalid argument map argument_map = {"arr1": "foo", "arr2": "bar"} output_param = "salinity" with self.assertRaises(BadRequest) as cm: dp1_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param) ex = cm.exception log.debug(' exception raised: %s', cm) self.assertEqual( ex.message, "Input data product does not contain the parameters defined in argument map" ) # create the data process with invalid output parameter name argument_map = {"arr1": "conductivity", "arr2": "pressure"} output_param = "foo" with self.assertRaises(BadRequest) as cm: dp1_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param) ex = cm.exception log.debug(' exception raised: %s', cm) self.assertEqual( ex.message, "Output data product does not contain the output parameter name provided" ) def create_event_data_processes(self): # two data processes using one transform and one DPD argument_map = {"a": "salinity"} # set up DPD and DP #2 - array add function tf_obj = IonObject( RT.TransformFunction, name='validate_salinity_array', description='validate_salinity_array', function='validate_salinity_array', module="ion.processes.data.transforms.test.test_transform_worker", arguments=['a'], function_type=TransformFunctionType.TRANSFORM) add_array_func_id, rev = self.rrclient.create(tf_obj) dpd_obj = IonObject( RT.DataProcessDefinition, name='validate_salinity_array', description='validate_salinity_array', data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS, ) add_array_dpd_id = self.dataprocessclient.create_data_process_definition( 
data_process_definition=dpd_obj, function_id=add_array_func_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( self.stream_def_id, add_array_dpd_id, binding='validate_salinity_array') # create the data process dp1_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=None, argument_map=argument_map) self.damsclient.register_process(dp1_data_process_id) self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id) return dp1_data_process_id def create_data_process(self): # two data processes using one transform and one DPD dp1_func_output_dp_id = self.create_output_data_product() argument_map = {"arr1": "conductivity", "arr2": "pressure"} output_param = "salinity" # set up DPD and DP #2 - array add function tf_obj = IonObject( RT.TransformFunction, name='add_array_func', description='adds values in an array', function='add_arrays', module="ion_example.add_arrays", arguments=['arr1', 'arr2'], function_type=TransformFunctionType.TRANSFORM, uri= 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg' ) add_array_func_id, rev = self.rrclient.create(tf_obj) dpd_obj = IonObject( RT.DataProcessDefinition, name='add_arrays', description='adds the values of two arrays', data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS, version_label='1.0a') add_array_dpd_id = self.dataprocessclient.create_data_process_definition( data_process_definition=dpd_obj, function_id=add_array_func_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( self.stream_def_id, add_array_dpd_id, binding='add_array_func') # create the data process dp1_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param) self.damsclient.register_process(dp1_data_process_id) #self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id) # add an attachment object to this DPD to test new SA-21 import msgpack attachment_content = 'foo bar' attachment_obj = IonObject(RT.Attachment, name='test_attachment', attachment_type=AttachmentType.ASCII, content_type='text/plain', content=msgpack.packb(attachment_content)) att_id = self.rrclient.create_attachment(add_array_dpd_id, attachment_obj) self.addCleanup(self.rrclient.delete_attachment, att_id) return add_array_dpd_id, dp1_data_process_id, dp1_func_output_dp_id def create_output_data_product(self): dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition( name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id) dp1_output_dp_obj = IonObject(RT.DataProduct, name='data_process1_data_product', description='output of add array func') dp1_func_output_dp_id = self.dataproductclient.create_data_product( dp1_output_dp_obj, dp1_outgoing_stream_id) self.addCleanup(self.dataproductclient.delete_data_product, dp1_func_output_dp_id) # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, None, True) self._output_stream_ids.append(stream_ids[0]) subscription_id = self.pubsub_client.create_subscription( 'validator', data_product_ids=[dp1_func_output_dp_id]) self.addCleanup(self.pubsub_client.delete_subscription, subscription_id) def on_granule(msg, route, stream_id): log.debug('recv_packet stream_id: %s route: %s 
msg: %s', stream_id, route, msg)
            self.validate_output_granule(msg, route, stream_id)
            self.granule_verified.set()

        validator = StandaloneStreamSubscriber('validator', callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_client.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id)

        return dp1_func_output_dp_id

    def validate_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DataProcessStatusEvent.
        """
        data_process_event = args[0]
        log.debug("DataProcessStatusEvent: %s", str(data_process_event.__dict__))

        # if data process already created, check origin
        if self.dp_list:
            self.assertIn(data_process_event.origin, self.dp_list)

        # if this is a heartbeat event then 100 granules have been processed
        if 'data process status update.' in data_process_event.description:
            self.heartbeat_event_verified.set()
        else:
            # else check that this is the assign event
            if 'Data process assigned to transform worker' in data_process_event.description:
                self.worker_assigned_event_verified.set()
            elif 'Data process created for data product' in data_process_event.description:
                self.dp_created_event_verified.set()

    def validate_output_granule(self, msg, route, stream_id):
        self.assertIn(stream_id, self._output_stream_ids)

        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.debug('validate_output_granule rdt: %s', rdt)
        sal_val = rdt['salinity']
        np.testing.assert_array_equal(sal_val, np.array([3]))

    def start_event_listener(self):
        es = EventSubscriber(event_type=OT.DataProcessStatusEvent, callback=self.validate_event)
        es.start()
        self.addCleanup(es.stop)

    def validate_transform_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DeviceStatusAlertEvent.
        """
        status_alert_event = args[0]
        np.testing.assert_array_equal(status_alert_event.origin, self.stream_id)
        np.testing.assert_array_equal(status_alert_event.values, np.array([self.event_data_process_id]))
        log.debug("DeviceStatusAlertEvent: %s", str(status_alert_event.__dict__))
        self.event_verified.set()

    def start_event_transform_listener(self):
        es = EventSubscriber(event_type=OT.DeviceStatusAlertEvent, callback=self.validate_transform_event)
        es.start()
        self.addCleanup(es.stop)

    def test_download(self):
        egg_url = 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        egg_path = TransformWorker.download_egg(egg_url)

        import pkg_resources
        pkg_resources.working_set.add_entry(egg_path)

        from ion_example.add_arrays import add_arrays
        a = add_arrays(1, 2)
        self.assertEquals(a, 3)
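
# A minimal sketch of the add_arrays transform function exercised by the tests
# above. The packaged ion_example egg is the authoritative source; this is an
# assumption inferred from the asserted signature 'def add_arrays(a, b)' and from
# validate_output_granule expecting salinity == conductivity + pressure:
#
#     import numpy as np
#
#     def add_arrays(a, b):
#         # element-wise sum of the two mapped input parameters
#         return np.asarray(a) + np.asarray(b)
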
class TestActivateInstrumentIntegration(IonIntegrationTestCase): def setUp(self): # Start container super(TestActivateInstrumentIntegration, self).setUp() config = DotDict() self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml', config) # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.usernotificationclient = UserNotificationServiceClient() #setup listerner vars self._data_greenlets = [] self._no_samples = None self._samples_received = [] self.event_publisher = EventPublisher() def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name+'_logger') producer_definition.executable = { 'module':'ion.processes.data.stream_granule_logger', 'class':'StreamGranuleLogger' } logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition) configuration = { 'process':{ 'stream_id':stream_id, } } pid = self.processdispatchclient.schedule_process(process_definition_id=logger_procdef_id, configuration=configuration) return pid def _create_notification(self, user_name = '', instrument_id='', product_id=''): #-------------------------------------------------------------------------------------- # Make notification request objects #-------------------------------------------------------------------------------------- notification_request_1 = NotificationRequest( name= 'notification_1', origin=instrument_id, origin_type="instrument", event_type='ResourceLifecycleEvent') notification_request_2 = NotificationRequest( name='notification_2', origin=product_id, origin_type="data product", event_type='DetectionEvent') #-------------------------------------------------------------------------------------- # Create a user and get the user_id #-------------------------------------------------------------------------------------- user = UserInfo() user.name = user_name user.contact.email = '*****@*****.**' % user_name user_id, _ = self.rrclient.create(user) #-------------------------------------------------------------------------------------- # Create notification #-------------------------------------------------------------------------------------- self.usernotificationclient.create_notification(notification=notification_request_1, user_id=user_id) self.usernotificationclient.create_notification(notification=notification_request_2, user_id=user_id) log.debug( "test_activateInstrumentSample: create_user_notifications user_id %s", str(user_id) ) return user_id def get_datastore(self, dataset_id): dataset = self.datasetclient.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = 
self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def _check_computed_attributes_of_extended_instrument(self, expected_instrument_device_id = '',extended_instrument = None): # Verify that computed attributes exist for the extended instrument self.assertIsInstance(extended_instrument.computed.last_data_received_datetime, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) self.assertIsInstance(extended_instrument.computed.power_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.communications_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.location_status_roll_up, ComputedIntValue) # the following assert will not work without elasticsearch. #self.assertEqual( 1, len(extended_instrument.computed.user_notification_requests.value) ) # Verify the computed attribute for user notification requests self.assertEqual( 1, len(extended_instrument.computed.user_notification_requests.value) ) notifications = extended_instrument.computed.user_notification_requests.value notification = notifications[0] self.assertEqual(expected_instrument_device_id, notification.origin) self.assertEqual("instrument", notification.origin_type) self.assertEqual('ResourceLifecycleEvent', notification.event_type) def _check_computed_attributes_of_extended_product(self, expected_data_product_id = '', extended_data_product = None): self.assertEqual(expected_data_product_id, extended_data_product._id) log.debug("extended_data_product.computed: %s", extended_data_product.computed) # Verify that computed attributes exist for the extended instrument self.assertIsInstance(extended_data_product.computed.product_download_size_estimated, ComputedFloatValue) self.assertIsInstance(extended_data_product.computed.number_active_subscriptions, ComputedIntValue) self.assertIsInstance(extended_data_product.computed.data_url, ComputedStringValue) self.assertIsInstance(extended_data_product.computed.stored_data_size, ComputedIntValue) self.assertIsInstance(extended_data_product.computed.recent_granules, ComputedDictValue) self.assertIsInstance(extended_data_product.computed.parameters, ComputedListValue) self.assertIsInstance(extended_data_product.computed.recent_events, ComputedEventListValue) self.assertIsInstance(extended_data_product.computed.provenance, ComputedDictValue) self.assertIsInstance(extended_data_product.computed.user_notification_requests, ComputedListValue) self.assertIsInstance(extended_data_product.computed.active_user_subscriptions, ComputedListValue) self.assertIsInstance(extended_data_product.computed.past_user_subscriptions, ComputedListValue) self.assertIsInstance(extended_data_product.computed.last_granule, ComputedDictValue) self.assertIsInstance(extended_data_product.computed.is_persisted, ComputedIntValue) self.assertIsInstance(extended_data_product.computed.data_contents_updated, ComputedStringValue) self.assertIsInstance(extended_data_product.computed.data_datetime, ComputedListValue) # exact text here keeps changing to fit UI capabilities. keep assertion general... 
self.assertEqual( 2, len(extended_data_product.computed.data_datetime.value) ) notifications = extended_data_product.computed.user_notification_requests.value notification = notifications[0] self.assertEqual(expected_data_product_id, notification.origin) self.assertEqual("data product", notification.origin_type) self.assertEqual('DetectionEvent', notification.event_type) @attr('LOCOINT') #@unittest.skip('refactoring') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 90}}}) def test_activateInstrumentSample(self): self.loggerpids = [] # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") instModel_id = self.imsclient.create_instrument_model(instModel_obj) log.debug( 'new InstrumentModel id = %s ', instModel_id) raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw') parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict') # Create InstrumentAgent instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations = [raw_config, parsed_config]) instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) log.debug('new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # Create InstrumentDevice log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ') instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" ) instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id) log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) " , instDevice_id) port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config = port_agent_config, alerts= []) instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubcli.create_stream_definition(name='parsed', parameter_dictionary_id=parsed_pdict_id) raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('raw', id_only=True) raw_stream_def_id = self.pubsubcli.create_stream_definition(name='raw', parameter_dictionary_id=raw_pdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain = tdom, spatial_domain = sdom) data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, 
stream_definition_id=parsed_stream_def_id) log.debug( 'new dp_id = %s' , data_product_id1) self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True) log.debug('Data product streams1 = %s', stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True) log.debug('Data set for data_product_id1 = %s' , dataset_ids[0]) self.parsed_dataset = dataset_ids[0] pid = self.create_logger('ctd_parsed', stream_ids[0] ) self.loggerpids.append(pid) dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test', temporal_domain = tdom, spatial_domain = sdom) data_product_id2 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) log.debug('new dp_id = %s', data_product_id2) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2) self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2) # setup notifications for the device and parsed data product user_id_1 = self._create_notification( user_name='user_1', instrument_id=instDevice_id, product_id=data_product_id1) #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users --------- user_id_2 = self._create_notification( user_name='user_2', instrument_id=instDevice_id, product_id=data_product_id2) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True) log.debug('Data product streams2 = %s' , str(stream_ids)) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasDataset, RT.Dataset, True) log.debug('Data set for data_product_id2 = %s' , dataset_ids[0]) self.raw_dataset = dataset_ids[0] def start_instrument_agent(): self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) gevent.joinall([gevent.spawn(start_instrument_agent)]) #cleanup self.addCleanup(self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=instAgentInstance_id) #wait for start inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(instAgentInstance_id) gate = AgentProcessStateGate(self.processdispatchclient.read_process, instDevice_id, ProcessStateEnum.RUNNING) self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" % gate.process_id) #log.trace('Instrument agent instance obj: = %s' , str(inst_agent_instance_obj)) # Start a resource agent client to talk with the instrument agent. 
self._ia_client = ResourceAgentClient(instDevice_id, to_name=gate.process_id, process=FakeProcess()) log.debug("test_activateInstrumentSample: got ia client %s" , str(self._ia_client)) cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) retval = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: initialize %s" , str(retval)) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.INACTIVE, state) log.debug("(L4-CI-SA-RQ-334): Sending go_active command ") cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrument: return value from go_active %s" , str(reply)) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.IDLE, state) cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE) retval = self._ia_client.execute_agent(cmd) state = retval.result log.debug("(L4-CI-SA-RQ-334): current state after sending go_active command %s" , str(state)) cmd = AgentCommand(command=ResourceAgentEvent.RUN) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: run %s" , str(reply)) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.COMMAND, state) cmd = AgentCommand(command=ResourceAgentEvent.PAUSE) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.STOPPED, state) cmd = AgentCommand(command=ResourceAgentEvent.RESUME) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.COMMAND, state) cmd = AgentCommand(command=ResourceAgentEvent.CLEAR) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.IDLE, state) cmd = AgentCommand(command=ResourceAgentEvent.RUN) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.COMMAND, state) for i in xrange(10): monitor = DatasetMonitor(dataset_id=self.parsed_dataset) self._ia_client.execute_resource(AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE)) if not monitor.wait(): raise AssertionError('Failed on the %ith granule' % i) monitor.stop() # cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE) # for i in xrange(10): # retval = self._ia_client.execute_resource(cmd) # log.debug("test_activateInstrumentSample: return from sample %s" , str(retval)) log.debug( "test_activateInstrumentSample: calling reset ") cmd = AgentCommand(command=ResourceAgentEvent.RESET) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: return from reset %s" , str(reply)) #-------------------------------------------------------------------------------- # Now get the data in one chunk using an RPC Call to start_retreive #-------------------------------------------------------------------------------- replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset) self.assertIsInstance(replay_data_raw, Granule) rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw) log.debug("RDT raw: %s", str(rdt_raw.pretty_print()) ) self.assertIn('raw', rdt_raw) raw_vals = rdt_raw['raw'] all_raw = "".join(raw_vals) # look for 't' entered after a prompt -- ">t" t_commands = all_raw.count(">t") if 10 != t_commands: log.error("%s raw_vals: ", len(raw_vals)) for i, r in enumerate(raw_vals): log.error("raw val %s: %s", i, [r]) self.fail("Expected 10 't' strings in raw_vals, got 
%s" % t_commands) else: log.debug("%s raw_vals: ", len(raw_vals)) for i, r in enumerate(raw_vals): log.debug("raw val %s: %s", i, [r]) replay_data_parsed = self.dataretrieverclient.retrieve(self.parsed_dataset) self.assertIsInstance(replay_data_parsed, Granule) rdt_parsed = RecordDictionaryTool.load_from_granule(replay_data_parsed) log.debug("test_activateInstrumentSample: RDT parsed: %s", str(rdt_parsed.pretty_print()) ) self.assertIn('temp', rdt_parsed) temp_vals = rdt_parsed['temp'] pressure_vals = rdt_parsed['pressure'] if 10 != len(temp_vals): log.error("%s temp_vals: %s", len(temp_vals), temp_vals) self.fail("Expected 10 temp_vals, got %s" % len(temp_vals)) log.debug("l4-ci-sa-rq-138") """ Physical resource control shall be subject to policy Instrument management control capabilities shall be subject to policy The actor accessing the control capabilities must be authorized to send commands. note from maurice 2012-05-18: Talk to tim M to verify that this is policy. If it is then talk with Stephen to get an example of a policy test and use that to create a test stub that will be completed when we have instrument policies. Tim M: The "actor", aka observatory operator, will access the instrument through ION. """ #-------------------------------------------------------------------------------- # Get the extended data product to see if it contains the granules #-------------------------------------------------------------------------------- extended_product = self.dpclient.get_data_product_extension(data_product_id=data_product_id1, user_id=user_id_1) def poller(extended_product): return len(extended_product.computed.user_notification_requests.value) == 1 poll(poller, extended_product, timeout=30) self._check_computed_attributes_of_extended_product( expected_data_product_id = data_product_id1, extended_data_product = extended_product) #-------------------------------------------------------------------------------- # Get the extended instrument #-------------------------------------------------------------------------------- extended_instrument = self.imsclient.get_instrument_device_extension(instrument_device_id=instDevice_id, user_id=user_id_1) #-------------------------------------------------------------------------------- # For the second user, check the extended data product and the extended intrument #-------------------------------------------------------------------------------- extended_product = self.dpclient.get_data_product_extension(data_product_id=data_product_id2, user_id=user_id_2) self._check_computed_attributes_of_extended_product(expected_data_product_id = data_product_id2, extended_data_product = extended_product) #-------------------------------------------------------------------------------- # Get the extended instrument #-------------------------------------------------------------------------------- extended_instrument = self.imsclient.get_instrument_device_extension(instrument_device_id=instDevice_id, user_id=user_id_2) self._check_computed_attributes_of_extended_instrument(expected_instrument_device_id = instDevice_id, extended_instrument = extended_instrument) #-------------------------------------------------------------------------------- # Deactivate loggers #-------------------------------------------------------------------------------- for pid in self.loggerpids: self.processdispatchclient.cancel_process(pid) self.dpclient.delete_data_product(data_product_id1) self.dpclient.delete_data_product(data_product_id2)
class TestIMSDeployAsPrimaryDevice(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() # self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.container.start_rel_from_url("res/deploy/r2deploy.yml") print "started services" # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() def create_logger(self, name, stream_id=""): # logger process producer_definition = ProcessDefinition(name=name + "_logger") producer_definition.executable = { "module": "ion.processes.data.stream_granule_logger", "class": "StreamGranuleLogger", } logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition) configuration = {"process": {"stream_id": stream_id}} pid = self.processdispatchclient.schedule_process( process_definition_id=logger_procdef_id, configuration=configuration ) return pid def cleanupprocs(self): stm = os.popen("ps -e | grep ion.agents.port.logger_process") procs = stm.read() if len(procs) > 0: procs = procs.split() if procs[0].isdigit(): pid = int(procs[0]) os.kill(pid, signal.SIGKILL) stm = os.popen("ps -e | grep ion.agents.instrument.zmq_driver_process") procs = stm.read() if len(procs) > 0: procs = procs.split() if procs[0].isdigit(): pid = int(procs[0]) os.kill(pid, signal.SIGKILL) # stm = os.popen('rm /tmp/*.pid.txt') @unittest.skip("Deprecated by IngestionManagement refactor, timeout on start inst agent?") def test_deploy_activate_full(self): # ensure no processes or pids are left around by agents or Sims # self.cleanupprocs() self.loggerpids = [] # ------------------------------- # Create InstrumentModel # ------------------------------- instModel_obj = IonObject(RT.InstrumentModel, name="SBE37IMModel", description="SBE37IMModel") try: instModel_id = self.imsclient.create_instrument_model(instModel_obj) except BadRequest as ex: self.fail("failed to create new InstrumentModel: %s" % ex) # ------------------------------- # Create InstrumentAgent # ------------------------------- instAgent_obj = IonObject( RT.InstrumentAgent, name="agent007", description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg", ) try: instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) except BadRequest as ex: self.fail("failed to create new InstrumentAgent: %s" % ex) log.debug("new InstrumentAgent id = %s", instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # ------------------------------- # Create Instrument Site # ------------------------------- instrumentSite_obj = IonObject(RT.InstrumentSite, name="instrumentSite1", 
description="SBE37IMInstrumentSite") try: instrumentSite_id = self.omsclient.create_instrument_site(instrument_site=instrumentSite_obj, parent_id="") except BadRequest as ex: self.fail("failed to create new InstrumentSite: %s" % ex) print "test_deployAsPrimaryDevice: new instrumentSite id = ", instrumentSite_id self.omsclient.assign_instrument_model_to_instrument_site(instModel_id, instrumentSite_id) # ------------------------------- # Logical Transform: Output Data Products # ------------------------------- # Construct temporal and spatial Coordinate Reference System objects tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( "ctd_parsed_param_dict", id_only=True ) parsed_stream_def_id = self.pubsubclient.create_stream_definition( name="parsed", parameter_dictionary_id=parsed_pdict_id ) raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_raw_param_dict", id_only=True) raw_stream_def_id = self.pubsubclient.create_stream_definition(name="raw", parameter_dictionary_id=raw_pdict_id) # ------------------------------- # Create Old InstrumentDevice # ------------------------------- instDevice_obj = IonObject( RT.InstrumentDevice, name="SBE37IMDeviceYear1", description="SBE37IMDevice for the FIRST year of deployment", serial_number="12345", ) try: oldInstDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, oldInstDevice_id) except BadRequest as ex: self.fail("failed to create new InstrumentDevice: %s" % ex) print "test_deployAsPrimaryDevice: new Year 1 InstrumentDevice id = ", oldInstDevice_id self.rrclient.execute_lifecycle_transition(oldInstDevice_id, LCE.DEPLOY) self.rrclient.execute_lifecycle_transition(oldInstDevice_id, LCE.ENABLE) # ------------------------------- # Create Raw and Parsed Data Products for the device # ------------------------------- dp_obj = IonObject( RT.DataProduct, name="SiteDataProduct", description="SiteDataProduct", temporal_domain=tdom, spatial_domain=sdom, ) instrument_site_output_dp_id = self.dataproductclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id ) self.damsclient.assign_data_product( input_resource_id=oldInstDevice_id, data_product_id=instrument_site_output_dp_id ) # self.dataproductclient.activate_data_product_persistence(data_product_id=instrument_site_output_dp_id) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(instrument_site_output_dp_id, PRED.hasStream, None, True) log.debug("Data product streams1 = %s", stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(instrument_site_output_dp_id, PRED.hasDataset, RT.Dataset, True) log.debug("Data set for data_product_id1 = %s", dataset_ids[0]) self.parsed_dataset = dataset_ids[0] pid = self.create_logger("ctd_parsed", stream_ids[0]) self.loggerpids.append(pid) self.omsclient.create_site_data_product(instrumentSite_id, instrument_site_output_dp_id) # ------------------------------- # Create Old Deployment # ------------------------------- deployment_obj = IonObject(RT.Deployment, name="first deployment") oldDeployment_id = self.omsclient.create_deployment(deployment_obj) # deploy this device to the logical slot self.imsclient.deploy_instrument_device(oldInstDevice_id, oldDeployment_id) 
self.omsclient.deploy_instrument_site(instrumentSite_id, oldDeployment_id) # ------------------------------- # Create InstrumentAgentInstance for OldInstrumentDevice to hold configuration information # cmd_port=5556, evt_port=5557, comms_method="ethernet", comms_device_address=CFG.device.sbe37.host, comms_device_port=CFG.device.sbe37.port, # ------------------------------- port_agent_config = { "device_addr": CFG.device.sbe37.host, "device_port": CFG.device.sbe37.port, "process_type": PortAgentProcessType.UNIX, "binary_path": "port_agent", "port_agent_addr": "localhost", "command_port": CFG.device.sbe37.port_agent_cmd_port, "data_port": CFG.device.sbe37.port_agent_data_port, "log_level": 5, "type": PortAgentType.ETHERNET, } raw_config = StreamConfiguration( stream_name="raw", parameter_dictionary_name="ctd_raw_param_dict", records_per_granule=2, granule_publish_rate=5, ) parsed_config = StreamConfiguration( stream_name="parsed", parameter_dictionary_name="ctd_parsed_param_dict", records_per_granule=2, granule_publish_rate=5, ) instAgentInstance_obj = IonObject( RT.InstrumentAgentInstance, name="SBE37IMAgentInstanceYear1", description="SBE37IMAgentInstanceYear1", comms_device_address="sbe37-simulator.oceanobservatories.org", comms_device_port=4001, port_agent_config=port_agent_config, stream_configurations=[raw_config, parsed_config], ) oldInstAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, oldInstDevice_id ) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() # ------------------------------- # Create CTD Parsed as the Year 1 data product and attach to instrument # ------------------------------- print "Creating new CDM data product with a stream definition" dp_obj = IonObject( RT.DataProduct, name="ctd_parsed_year1", description="ctd stream test year 1", temporal_domain=tdom, spatial_domain=sdom, ) ctd_parsed_data_product_year1 = self.dataproductclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id ) print "new ctd_parsed_data_product_id = ", ctd_parsed_data_product_year1 self.damsclient.assign_data_product( input_resource_id=oldInstDevice_id, data_product_id=ctd_parsed_data_product_year1 ) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_year1, PRED.hasStream, None, True) print "test_deployAsPrimaryDevice: Data product streams1 = ", stream_ids # ------------------------------- # Create New InstrumentDevice # ------------------------------- instDevice_obj_2 = IonObject( RT.InstrumentDevice, name="SBE37IMDeviceYear2", description="SBE37IMDevice for the SECOND year of deployment", serial_number="67890", ) try: newInstDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj_2) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, newInstDevice_id) except BadRequest as ex: self.fail("failed to create new InstrumentDevice: %s" % ex) print "test_deployAsPrimaryDevice: new Year 2 InstrumentDevice id = ", newInstDevice_id # set the LCSTATE self.rrclient.execute_lifecycle_transition(newInstDevice_id, LCE.DEPLOY) self.rrclient.execute_lifecycle_transition(newInstDevice_id, LCE.ENABLE) instDevice_obj_2 = self.rrclient.read(newInstDevice_id) log.debug("test_deployAsPrimaryDevice: Create New InstrumentDevice LCSTATE: %s ", str(instDevice_obj_2.lcstate)) # ------------------------------- # Create Old Deployment # ------------------------------- deployment_obj = 
IonObject(RT.Deployment, name="second deployment") newDeployment_id = self.omsclient.create_deployment(deployment_obj) # deploy this device to the logical slot self.imsclient.deploy_instrument_device(newInstDevice_id, newDeployment_id) self.omsclient.deploy_instrument_site(instrumentSite_id, newDeployment_id) # ------------------------------- # Create InstrumentAgentInstance for NewInstrumentDevice to hold configuration information # ------------------------------- port_agent_config = { "device_addr": "sbe37-simulator.oceanobservatories.org", "device_port": 4004, "process_type": PortAgentProcessType.UNIX, "binary_path": "port_agent", "port_agent_addr": "localhost", "command_port": 4005, "data_port": 4006, "log_level": 5, "type": PortAgentType.ETHERNET, } instAgentInstance_obj = IonObject( RT.InstrumentAgentInstance, name="SBE37IMAgentInstanceYear2", description="SBE37IMAgentInstanceYear2", comms_device_address="sbe37-simulator.oceanobservatories.org", comms_device_port=4004, port_agent_config=port_agent_config, ) newInstAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, newInstDevice_id ) # ------------------------------- # Create CTD Parsed as the Year 2 data product # ------------------------------- dp_obj = IonObject( RT.DataProduct, name="ctd_parsed_year2", description="ctd stream test year 2", temporal_domain=tdom, spatial_domain=sdom, ) ctd_parsed_data_product_year2 = self.dataproductclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id ) print "new ctd_parsed_data_product_id = ", ctd_parsed_data_product_year2 self.damsclient.assign_data_product( input_resource_id=newInstDevice_id, data_product_id=ctd_parsed_data_product_year2 ) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_year2, PRED.hasStream, None, True) print "test_deployAsPrimaryDevice: Data product streams2 = ", stream_ids # ------------------------------- # L0 Conductivity - Temperature - Pressure: Data Process Definition # ------------------------------- log.debug("test_deployAsPrimaryDevice: create data process definition ctd_L0_all") dpd_obj = IonObject( RT.DataProcessDefinition, name="ctd_L0_all", description="transform ctd package into three separate L0 streams", module="ion.processes.data.transforms.ctd.ctd_L0_all", class_name="ctd_L0_all", ) try: ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new ctd_L0_all data process definition: %s" % ex) # ------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products # ------------------------------- outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition( name="L0_Conductivity", parameter_dictionary_id=parsed_pdict_id ) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding="conductivity" ) outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition( name="L0_Pressure", parameter_dictionary_id=parsed_pdict_id ) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding="pressure" ) outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition( name="L0_Temperature", parameter_dictionary_id=parsed_pdict_id ) 
self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding="temperature" ) self.output_products = {} log.debug("test_deployAsPrimaryDevice: create output data product L0 conductivity") ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name="L0_Conductivity", description="transform output conductivity", temporal_domain=tdom, spatial_domain=sdom, ) ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product( data_product=ctd_l0_conductivity_output_dp_obj, stream_definition_id=parsed_stream_def_id ) self.output_products["conductivity"] = ctd_l0_conductivity_output_dp_id # self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id) log.debug("test_deployAsPrimaryDevice: create output data product L0 pressure") ctd_l0_pressure_output_dp_obj = IonObject( RT.DataProduct, name="L0_Pressure", description="transform output pressure", temporal_domain=tdom, spatial_domain=sdom, ) ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product( data_product=ctd_l0_pressure_output_dp_obj, stream_definition_id=parsed_stream_def_id ) self.output_products["pressure"] = ctd_l0_pressure_output_dp_id # self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id) log.debug("test_deployAsPrimaryDevice: create output data product L0 temperature") ctd_l0_temperature_output_dp_obj = IonObject( RT.DataProduct, name="L0_Temperature", description="transform output temperature", temporal_domain=tdom, spatial_domain=sdom, ) ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product( data_product=ctd_l0_temperature_output_dp_obj, stream_definition_id=parsed_stream_def_id ) self.output_products["temperature"] = ctd_l0_temperature_output_dp_id # self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id) # ------------------------------- # L0 Conductivity - Temperature - Pressure: Create the data process, listening to Sim1 (later: logical instrument output product) # ------------------------------- log.debug("test_deployAsPrimaryDevice: create L0 all data_process start") try: ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process( ctd_L0_all_dprocdef_id, [ctd_parsed_data_product_year1], self.output_products ) self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" % ex) log.debug("test_deployAsPrimaryDevice: create L0 all data_process return") # -------------------------------- # Activate the deployment # -------------------------------- self.omsclient.activate_deployment(oldDeployment_id) # ------------------------------- # Launch InstrumentAgentInstance Sim1, connect to the resource agent client # ------------------------------- self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=oldInstAgentInstance_id) self.addCleanup( self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=oldInstAgentInstance_id ) # wait for start instance_obj = self.imsclient.read_instrument_agent_instance(oldInstAgentInstance_id) gate = ProcessStateGate( self.processdispatchclient.read_process, instance_obj.agent_process_id, ProcessStateEnum.RUNNING ) self.assertTrue( gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" % instance_obj.agent_process_id, ) inst_agent1_instance_obj = 
self.imsclient.read_instrument_agent_instance(oldInstAgentInstance_id) print "test_deployAsPrimaryDevice: Instrument agent instance obj: = ", inst_agent1_instance_obj # Start a resource agent client to talk with the instrument agent. self._ia_client_sim1 = ResourceAgentClient( "iaclient Sim1", name=inst_agent1_instance_obj.agent_process_id, process=FakeProcess() ) print "activate_instrument: got _ia_client_sim1 %s", self._ia_client_sim1 log.debug(" test_deployAsPrimaryDevice:: got _ia_client_sim1 %s", str(self._ia_client_sim1)) # ------------------------------- # Launch InstrumentAgentInstance Sim2, connect to the resource agent client # ------------------------------- self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=newInstAgentInstance_id) self.addCleanup( self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=newInstAgentInstance_id ) # wait for start instance_obj = self.imsclient.read_instrument_agent_instance(newInstAgentInstance_id) gate = ProcessStateGate( self.processdispatchclient.read_process, instance_obj.agent_process_id, ProcessStateEnum.RUNNING ) self.assertTrue( gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" % instance_obj.agent_process_id, ) inst_agent2_instance_obj = self.imsclient.read_instrument_agent_instance(newInstAgentInstance_id) print "test_deployAsPrimaryDevice: Instrument agent instance obj: = ", inst_agent2_instance_obj # Start a resource agent client to talk with the instrument agent. self._ia_client_sim2 = ResourceAgentClient( "iaclient Sim2", name=inst_agent2_instance_obj.agent_process_id, process=FakeProcess() ) print "activate_instrument: got _ia_client_sim2 %s", self._ia_client_sim2 log.debug(" test_deployAsPrimaryDevice:: got _ia_client_sim2 %s", str(self._ia_client_sim2)) # ------------------------------- # Streaming Sim1 (old instrument) # ------------------------------- cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) retval = self._ia_client_sim1.execute_agent(cmd) log.debug("test_deployAsPrimaryDevice: initialize %s", str(retval)) log.debug("(L4-CI-SA-RQ-334): Sending go_active command ") cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) reply = self._ia_client_sim1.execute_agent(cmd) log.debug("test_deployAsPrimaryDevice: return value from go_active %s", str(reply)) self.assertTrue(reply) cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE) retval = self._ia_client_sim1.execute_agent(cmd) state = retval.result log.debug("(L4-CI-SA-RQ-334): current state after sending go_active command %s", str(state)) cmd = AgentCommand(command=ResourceAgentEvent.RUN) reply = self._ia_client_sim1.execute_agent(cmd) log.debug("test_deployAsPrimaryDevice: run %s", str(reply)) gevent.sleep(2) cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE) retval = self._ia_client_sim1.execute_resource(cmd) log.debug("test_activateInstrumentSample: return from START_AUTOSAMPLE: %s", str(retval)) # ------------------------------- # Streaming Sim 2 (new instrument) # ------------------------------- cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) retval = self._ia_client_sim2.execute_agent(cmd) log.debug("test_deployAsPrimaryDevice: initialize_sim2 %s", str(retval)) log.debug("(L4-CI-SA-RQ-334): Sending go_active command ") cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) reply = self._ia_client_sim2.execute_agent(cmd) log.debug("test_deployAsPrimaryDevice: return value from go_active_sim2 %s", str(reply)) cmd = 
AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE) retval = self._ia_client_sim2.execute_agent(cmd) state = retval.result log.debug("(L4-CI-SA-RQ-334): current state after sending go_active_sim2 command %s", str(state)) cmd = AgentCommand(command=ResourceAgentEvent.RUN) reply = self._ia_client_sim2.execute_agent(cmd) log.debug("test_deployAsPrimaryDevice: run %s", str(reply)) gevent.sleep(2) cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE) retval = self._ia_client_sim2.execute_resource(cmd) log.debug("test_deployAsPrimaryDevice: return from START_AUTOSAMPLE_sim2: %s", str(retval)) gevent.sleep(10) # ------------------------------- # Shutdown Sim1 (old instrument) # ------------------------------- cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE) retval = self._ia_client_sim1.execute_resource(cmd) log.debug("test_deployAsPrimaryDevice: return from STOP_AUTOSAMPLE: %s", str(retval)) log.debug("test_deployAsPrimaryDevice: calling reset ") cmd = AgentCommand(command=ResourceAgentEvent.RESET) reply = self._ia_client_sim1.execute_agent(cmd) log.debug("test_deployAsPrimaryDevice: return from reset %s", str(reply)) time.sleep(5) # ------------------------------- # Shutdown Sim2 (new instrument) # ------------------------------- cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE) retval = self._ia_client_sim2.execute_resource(cmd) log.debug("test_deployAsPrimaryDevice: return from STOP_AUTOSAMPLE_sim2: %s", str(retval)) log.debug("test_deployAsPrimaryDevice: calling reset_sim2 ") cmd = AgentCommand(command=ResourceAgentEvent.RESET) reply = self._ia_client_sim2.execute_agent(cmd) log.debug("test_deployAsPrimaryDevice: return from reset_sim2 %s", str(reply)) time.sleep(5)
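The Sim1 and Sim2 start-up sequences above repeat the same command ladder (INITIALIZE, GO_ACTIVE, RUN, then START_AUTOSAMPLE). A minimal sketch of a helper that factors this out, using only names already present in the test (AgentCommand, ResourceAgentEvent, SBE37ProtocolEvent, gevent, log); the helper name _start_streaming is hypothetical:

    def _start_streaming(self, ia_client, label):
        # Drive the resource agent from UNINITIALIZED into autosample mode,
        # mirroring the inline sequence used for Sim1 and Sim2 above.
        for event in (ResourceAgentEvent.INITIALIZE,
                      ResourceAgentEvent.GO_ACTIVE,
                      ResourceAgentEvent.RUN):
            reply = ia_client.execute_agent(AgentCommand(command=event))
            log.debug("%s: %s returned %s", label, event, str(reply))
        gevent.sleep(2)
        retval = ia_client.execute_resource(
            AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE))
        log.debug("%s: START_AUTOSAMPLE returned %s", label, str(retval))

With this in place the streaming half of the test reduces to self._start_streaming(self._ia_client_sim1, "Sim1") followed by self._start_streaming(self._ia_client_sim2, "Sim2"), and the shutdown half (STOP_AUTOSAMPLE then RESET) could be factored the same way.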
class TestDeployment(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dmpsclient = DataProductManagementServiceClient( node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.psmsclient = PubsubManagementServiceClient( node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.c = DotDict() self.c.resource_registry = self.rrclient self.RR2 = EnhancedResourceRegistryClient(self.rrclient) # create missing data process definition self.dsmsclient = DataProcessManagementServiceClient( node=self.container.node) dpd_obj = IonObject( RT.DataProcessDefinition, name=LOGICAL_TRANSFORM_DEFINITION_NAME, description="normally in preload", module='ion.processes.data.transforms.logical_transform', class_name='logical_transform') self.dsmsclient.create_data_process_definition(dpd_obj) # deactivate all data processes when tests are complete def killAllDataProcesses(): for proc_id in self.rrclient.find_resources( RT.DataProcess, None, None, True)[0]: self.dsmsclient.deactivate_data_process(proc_id) self.dsmsclient.delete_data_process(proc_id) self.addCleanup(killAllDataProcesses) #@unittest.skip("targeting") def test_create_deployment(self): #create a deployment with metadata and an initial site and device platform_site__obj = IonObject(RT.PlatformSite, name='PlatformSite1', description='test platform site') site_id = self.omsclient.create_platform_site(platform_site__obj) platform_device__obj = IonObject(RT.PlatformDevice, name='PlatformDevice1', description='test platform device') device_id = self.imsclient.create_platform_device(platform_device__obj) start = IonTime(datetime.datetime(2013, 1, 1)) end = IonTime(datetime.datetime(2014, 1, 1)) temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string()) deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment', constraint_list=[temporal_bounds]) deployment_id = self.omsclient.create_deployment(deployment_obj) self.omsclient.deploy_platform_site(site_id, deployment_id) self.imsclient.deploy_platform_device(device_id, deployment_id) log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id)) #retrieve the deployment object and check that the associated site and device are attached read_deployment_obj = self.omsclient.read_deployment(deployment_id) log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj)) site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True) self.assertEqual(len(site_ids), 1) device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True) self.assertEqual(len(device_ids), 1) #delete the deployment self.RR2.pluck(deployment_id) self.omsclient.force_delete_deployment(deployment_id) # now try to read the deleted deployment object try: self.omsclient.read_deployment(deployment_id) except NotFound: pass else: self.fail("deleted deployment was found during read") #@unittest.skip("targeting") def base_activate_deployment(self): 
#------------------------------------------------------------------------------------- # Create platform site, platform device, platform model #------------------------------------------------------------------------------------- platform_site__obj = IonObject(RT.PlatformSite, name='PlatformSite1', description='test platform site') platform_site_id = self.omsclient.create_platform_site( platform_site__obj) platform_device_obj = IonObject(RT.PlatformDevice, name='PlatformDevice1', description='test platform device') platform_device_id = self.imsclient.create_platform_device( platform_device_obj) platform_model__obj = IonObject(RT.PlatformModel, name='PlatformModel1', description='test platform model') platform_model_id = self.imsclient.create_platform_model( platform_model__obj) #------------------------------------------------------------------------------------- # Create instrument site #------------------------------------------------------------------------------------- instrument_site_obj = IonObject(RT.InstrumentSite, name='InstrumentSite1', description='test instrument site') instrument_site_id = self.omsclient.create_instrument_site( instrument_site_obj, platform_site_id) pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.psmsclient.create_stream_definition( name='SBE37_CDM', parameter_dictionary_id=pdict_id) # Construct temporal and spatial Coordinate Reference System objects tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='Log Data Product', description='some new dp', temporal_domain=tdom, spatial_domain=sdom) out_log_data_product_id = self.dmpsclient.create_data_product( dp_obj, ctd_stream_def_id) #---------------------------------------------------------------------------------------------------- # Start the transform (a logical transform) that acts as an instrument site #---------------------------------------------------------------------------------------------------- self.omsclient.create_site_data_product( site_id=instrument_site_id, data_product_id=out_log_data_product_id) #---------------------------------------------------------------------------------------------------- # Create an instrument device #---------------------------------------------------------------------------------------------------- instrument_device_obj = IonObject(RT.InstrumentDevice, name='InstrumentDevice1', description='test instrument device') instrument_device_id = self.imsclient.create_instrument_device( instrument_device_obj) self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) dp_obj = IonObject(RT.DataProduct, name='Instrument Data Product', description='some new dp', temporal_domain=tdom, spatial_domain=sdom) inst_data_product_id = self.dmpsclient.create_data_product( dp_obj, ctd_stream_def_id) #assign data products appropriately self.damsclient.assign_data_product( input_resource_id=instrument_device_id, data_product_id=inst_data_product_id) #---------------------------------------------------------------------------------------------------- # Create an instrument model #---------------------------------------------------------------------------------------------------- instrument_model_obj = IonObject(RT.InstrumentModel, name='InstrumentModel1', description='test instrument model') instrument_model_id = self.imsclient.create_instrument_model( instrument_model_obj) 
#---------------------------------------------------------------------------------------------------- # Create a deployment object #---------------------------------------------------------------------------------------------------- start = IonTime(datetime.datetime(2013, 1, 1)) end = IonTime(datetime.datetime(2014, 1, 1)) temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string()) deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment', constraint_list=[temporal_bounds]) deployment_id = self.omsclient.create_deployment(deployment_obj) log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id)) ret = DotDict(instrument_site_id=instrument_site_id, instrument_device_id=instrument_device_id, instrument_model_id=instrument_model_id, platform_site_id=platform_site_id, platform_device_id=platform_device_id, platform_model_id=platform_model_id, deployment_id=deployment_id) return ret #@unittest.skip("targeting") def test_activate_deployment_normal(self): res = self.base_activate_deployment() log.debug("assigning platform and instrument models") self.imsclient.assign_platform_model_to_platform_device( res.platform_model_id, res.platform_device_id) self.imsclient.assign_instrument_model_to_instrument_device( res.instrument_model_id, res.instrument_device_id) self.omsclient.assign_platform_model_to_platform_site( res.platform_model_id, res.platform_site_id) self.omsclient.assign_instrument_model_to_instrument_site( res.instrument_model_id, res.instrument_site_id) log.debug("adding instrument site and device to deployment") self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id) self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id) log.debug("adding platform site and device to deployment") self.omsclient.deploy_platform_site(res.platform_site_id, res.deployment_id) self.imsclient.deploy_platform_device(res.platform_device_id, res.deployment_id) log.debug("activating deployment, expecting success") self.omsclient.activate_deployment(res.deployment_id) #@unittest.skip("targeting") def test_activate_deployment_nomodels(self): res = self.base_activate_deployment() self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id) self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id) log.debug( "activating deployment without site+device models, expecting fail") self.assert_deploy_fail( res.deployment_id, "Expected at least 1 model for InstrumentSite") log.debug("assigning instrument site model") self.omsclient.assign_instrument_model_to_instrument_site( res.instrument_model_id, res.instrument_site_id) log.debug( "activating deployment without device models, expecting fail") self.assert_deploy_fail(res.deployment_id, "Expected 1 model for InstrumentDevice") #@unittest.skip("targeting") def test_activate_deployment_nosite(self): res = self.base_activate_deployment() log.debug("assigning instrument models") self.imsclient.assign_instrument_model_to_instrument_device( res.instrument_model_id, res.instrument_device_id) self.omsclient.assign_instrument_model_to_instrument_site( res.instrument_model_id, res.instrument_site_id) log.debug("deploying instrument device only") self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id) log.debug( "activating deployment without a deployed site, expecting fail") self.assert_deploy_fail(res.deployment_id, "No sites were found in the deployment") #@unittest.skip("targeting") def test_activate_deployment_nodevice(self): res = self.base_activate_deployment() log.debug("assigning instrument models") self.imsclient.assign_instrument_model_to_instrument_device( res.instrument_model_id, res.instrument_device_id) self.omsclient.assign_instrument_model_to_instrument_site( res.instrument_model_id, res.instrument_site_id) log.debug("deploying instrument site only") self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id) log.debug( "activating deployment without a deployed device, expecting fail") self.assert_deploy_fail( res.deployment_id, "The set of devices could not be mapped to the set of sites") def assert_deploy_fail(self, deployment_id, fail_message="did not specify fail_message"): with self.assertRaises(BadRequest) as cm: self.omsclient.activate_deployment(deployment_id) self.assertIn(fail_message, cm.exception.message)
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') print 'started services' # Now create client to DataProductManagementService self.client = DataProductManagementServiceClient(node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) def test_get_last_update(self): from ion.processes.data.last_update_cache import CACHE_DATASTORE_NAME #------------------------------------------ # Create the environment #------------------------------------------ definition = SBE37_CDM_stream_definition() datastore_name = CACHE_DATASTORE_NAME db = self.container.datastore_manager.get_datastore(datastore_name) stream_def_id = self.pubsubcli.create_stream_definition(container=definition) dp = DataProduct(name='dp1') data_product_id = self.client.create_data_product(data_product=dp, stream_definition_id=stream_def_id) stream_ids, garbage = self.rrclient.find_objects(data_product_id, PRED.hasStream, id_only=True) stream_id = stream_ids[0] fake_lu = LastUpdate() fake_lu_doc = db._ion_object_to_persistence_dict(fake_lu) db.create_doc(fake_lu_doc, object_id=stream_id) #------------------------------------------ # Now execute #------------------------------------------ res = self.client.get_last_update(data_product_id=data_product_id) self.assertTrue(isinstance(res[stream_id], LastUpdate), 'retrieving documents failed') def test_createDataProduct(self): client = self.client # create a stream definition for the data from the ctd simulator ctd_stream_def = ctd_stream_definition() ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def, name='Simulated CTD data') # test creating a new data product w/o a stream definition print 'test_createDataProduct: Creating new data product w/o a stream definition (L4-CI-SA-RQ-308)' dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp') try: dp_id = client.create_data_product(dp_obj, '') dp_obj = client.read_data_product(dp_id) except BadRequest as ex: self.fail("failed to create new data product: %s" %ex) print 'new dp_id = ', dp_id log.debug("test_createDataProduct: Data product info from registry %s (L4-CI-SA-RQ-308)", str(dp_obj)) # test creating a new data product with a stream definition print 'Creating new data product with a stream definition' dp_obj = IonObject(RT.DataProduct, name='DP2', description='some new dp') try: dp_id2 = client.create_data_product(dp_obj, ctd_stream_def_id) except BadRequest as ex: self.fail("failed to create new data product: %s" %ex) print 'new dp_id = ', dp_id2 # test activate and suspend data product persistence try: client.activate_data_product_persistence(dp_id2, persist_data=True, persist_metadata=True) time.sleep(3) client.suspend_data_product_persistence(dp_id2) except BadRequest as ex: self.fail("failed to activate / deactivate data product persistence : %s" %ex) pid = self.container.spawn_process(name='dummy_process_for_test', module='pyon.ion.process', cls='SimpleProcess', config={}) dummy_process = self.container.proc_manager.procs[pid] ''' publisher_registrar = StreamPublisherRegistrar(process=dummy_process, node=self.container.node) 
self.ctd_stream1_publisher = publisher_registrar.create_publisher(stream_id=self.in_stream_id) msg = {'num':'3'} self.ctd_stream1_publisher.publish(msg) time.sleep(1) msg = {'num':'5'} self.ctd_stream1_publisher.publish(msg) time.sleep(1) msg = {'num':'9'} self.ctd_stream1_publisher.publish(msg) ''' # test creating a duplicate data product print 'Creating the same data product a second time (duplicate)' dp_obj.description = 'the first dp' try: dp_id = client.create_data_product(dp_obj, ctd_stream_def_id) except BadRequest as ex: print ex else: self.fail("duplicate data product was created with the same name") # test reading a non-existent data product print 'reading non-existent data product' try: dp_obj = client.read_data_product('some_fake_id') except NotFound as ex: pass else: self.fail("non-existing data product was found during read: %s" %dp_obj) # update a data product (tests read also) print 'Updating data product' # first get the existing dp object try: dp_obj = client.read_data_product(dp_id) except NotFound as ex: self.fail("existing data product was not found during read") else: pass #print 'dp_obj = ', dp_obj # now tweak the object dp_obj.description = 'the very first dp' # now write the dp back to the registry try: update_result = client.update_data_product(dp_obj) except NotFound as ex: self.fail("existing data product was not found during update") except Conflict as ex: self.fail("revision conflict exception during data product update") # now get the dp back to see if it was updated try: dp_obj = client.read_data_product(dp_id) except NotFound as ex: self.fail("existing data product was not found during read") else: pass #print 'dp_obj = ', dp_obj self.assertTrue(dp_obj.description == 'the very first dp') # now 'delete' the data product print "deleting data product: ", dp_id try: client.delete_data_product(dp_id) except NotFound as ex: self.fail("existing data product was not found during delete") # now try to get the deleted dp object try: dp_obj = client.read_data_product(dp_id) except NotFound as ex: pass else: self.fail("deleted data product was found during read") # now try to delete the already deleted dp object print "deleting non-existing data product" try: client.delete_data_product(dp_id) except NotFound as ex: pass else: self.fail("non-existing data product was found during delete")
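The try/except/else blocks in the test above predate the assertRaises context manager that the newer tests later in this file use. A sketch of the same negative checks in that idiom, reusing client, dp_obj, dp_id and ctd_stream_def_id from the test (no new APIs assumed):

    # duplicate create should be rejected
    with self.assertRaises(BadRequest):
        client.create_data_product(dp_obj, ctd_stream_def_id)

    # read and delete of a removed product should both raise NotFound
    client.delete_data_product(dp_id)
    with self.assertRaises(NotFound):
        client.read_data_product(dp_id)
    with self.assertRaises(NotFound):
        client.delete_data_product(dp_id)

This keeps the intent (each failure mode is asserted, not merely tolerated) while dropping the manual else: self.fail(...) plumbing.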
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.process_dispatcher = ProcessDispatcherServiceClient() self.dataset_management = DatasetManagementServiceClient() self.unsc = UserNotificationServiceClient() self.data_retriever = DataRetrieverServiceClient() #------------------------------------------ # Create the environment #------------------------------------------ datastore_name = CACHE_DATASTORE_NAME self.db = self.container.datastore_manager.get_datastore(datastore_name) self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM') self.process_definitions = {} ingestion_worker_definition = ProcessDefinition(name='ingestion worker') ingestion_worker_definition.executable = { 'module':'ion.processes.data.ingestion.science_granule_ingestion_worker', 'class' :'ScienceGranuleIngestionWorker' } process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition) self.process_definitions['ingestion_worker'] = process_definition_id self.pids = [] self.exchange_points = [] self.exchange_names = [] #------------------------------------------------------------------------------------------------ # First launch the ingestors #------------------------------------------------------------------------------------------------ self.exchange_space = 'science_granule_ingestion' self.exchange_point = 'science_data' config = DotDict() config.process.datastore_name = 'datasets' config.process.queue_name = self.exchange_space self.exchange_names.append(self.exchange_space) self.exchange_points.append(self.exchange_point) pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config) log.debug("the ingestion worker process id: %s", pid) self.pids.append(pid) self.addCleanup(self.cleaning_up) def cleaning_up(self): for pid in self.pids: log.debug("number of pids to be terminated: %s", len(self.pids)) try: self.process_dispatcher.cancel_process(pid) log.debug("Terminated the process: %s", pid) except: log.debug("could not terminate the process id: %s" % pid) IngestionManagementIntTest.clean_subscriptions() for xn in self.exchange_names: xni = self.container.ex_manager.create_xn_queue(xn) xni.delete() for xp in self.exchange_points: xpi = self.container.ex_manager.create_xp(xp) xpi.delete() def get_datastore(self, dataset_id): dataset = self.dataset_management.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def test_create_data_product(self): #------------------------------------------------------------------------------------------------ # create a stream definition for the data from the ctd simulator #------------------------------------------------------------------------------------------------ parameter_dictionary_id = 
self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict') ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary_id) log.debug("Created stream def id %s" % ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test creating a new data product w/o a stream definition #------------------------------------------------------------------------------------------------ # Generic time-series data domain creation tdom, sdom = time_series_domain() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom.dump(), spatial_domain = sdom.dump()) dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 200.0 dp_obj.geospatial_bounds.geospatial_latitude_limit_south = 100.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 50.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_west = 100.0 #------------------------------------------------------------------------------------------------ # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary #------------------------------------------------------------------------------------------------ dp_id = self.dpsc_cli.create_data_product( data_product= dp_obj, stream_definition_id=ctd_stream_def_id) self.dpsc_cli.activate_data_product_persistence(dp_id) dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertIsNotNone(dp_obj) self.assertEquals(dp_obj.geospatial_point_center.lat, 150.0) log.debug('Created data product %s', dp_obj) #------------------------------------------------------------------------------------------------ # test creating a new data product with a stream definition #------------------------------------------------------------------------------------------------ log.debug('Creating new data product with a stream definition') dp_obj = IonObject(RT.DataProduct, name='DP2', description='some new dp', temporal_domain = tdom.dump(), spatial_domain = sdom.dump()) dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id) self.dpsc_cli.activate_data_product_persistence(dp_id2) log.debug('new dp_id = %s' % dp_id2) #------------------------------------------------------------------------------------------------ #make sure data product is associated with stream def #------------------------------------------------------------------------------------------------ streamdefs = [] streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True) for s in streams: log.debug("Checking stream %s" % s) sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True) for sd in sdefs: log.debug("Checking streamdef %s" % sd) streamdefs.append(sd) self.assertIn(ctd_stream_def_id, streamdefs) # test reading a non-existent data product log.debug('reading non-existent data product') with self.assertRaises(NotFound): dp_obj = self.dpsc_cli.read_data_product('some_fake_id') # update a data product (tests read also) log.debug('Updating data product') # first get the existing dp object dp_obj = self.dpsc_cli.read_data_product(dp_id) # now tweak the object dp_obj.description = 'the very first dp' dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 300.0 dp_obj.geospatial_bounds.geospatial_latitude_limit_south = 200.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 150.0 
dp_obj.geospatial_bounds.geospatial_longitude_limit_west = 200.0 # now write the dp back to the registry update_result = self.dpsc_cli.update_data_product(dp_obj) # now get the dp back to see if it was updated dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertEquals(dp_obj.description,'the very first dp') self.assertEquals(dp_obj.geospatial_point_center.lat, 250.0) log.debug('Updated data product %s', dp_obj) #test extension extended_product = self.dpsc_cli.get_data_product_extension(dp_id) self.assertEqual(dp_id, extended_product._id) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_product.computed.product_download_size_estimated.status) self.assertEqual(0, extended_product.computed.product_download_size_estimated.value) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_product.computed.parameters.status) #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value) # now 'delete' the data product log.debug("deleting data product: %s" % dp_id) self.dpsc_cli.delete_data_product(dp_id) self.dpsc_cli.force_delete_data_product(dp_id) # now try to get the deleted dp object with self.assertRaises(NotFound): dp_obj = self.dpsc_cli.read_data_product(dp_id) # Get the events corresponding to the data product ret = self.unsc.get_recent_events(resource_id=dp_id) events = ret.value for event in events: log.debug("event time: %s" % event.ts_created) self.assertTrue(len(events) > 0) def test_data_product_stream_def(self): pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj, stream_definition_id=ctd_stream_def_id) stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id) self.assertEquals(ctd_stream_def_id, stream_def_id) def test_activate_suspend_data_product(self): #------------------------------------------------------------------------------------------------ # create a stream definition for the data from the ctd simulator #------------------------------------------------------------------------------------------------ pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) log.debug("Created stream def id %s" % ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test creating a new data product w/o a stream definition #------------------------------------------------------------------------------------------------ # Construct temporal and spatial Coordinate Reference System objects tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) log.debug("Created an IonObject for a data product: %s" % dp_obj) #------------------------------------------------------------------------------------------------ # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary 
#------------------------------------------------------------------------------------------------ dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj, stream_definition_id=ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test activate and suspend data product persistence #------------------------------------------------------------------------------------------------ self.dpsc_cli.activate_data_product_persistence(dp_id) dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertIsNotNone(dp_obj) dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True) if not dataset_ids: raise NotFound("Data Product %s dataset does not exist" % str(dp_id)) self.get_datastore(dataset_ids[0]) # Check that the streams associated with the data product are persisted stream_ids, _ = self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True) for stream_id in stream_ids: self.assertTrue(self.ingestclient.is_persisted(stream_id)) #-------------------------------------------------------------------------------- # Now get the data in one chunk using an RPC call to retrieve #-------------------------------------------------------------------------------- replay_data = self.data_retriever.retrieve(dataset_ids[0]) self.assertIsInstance(replay_data, Granule) log.debug("The data retriever was able to replay the dataset attached to the data product, " "so the data product was indeed persisted; otherwise its dataset could not have been " "retrieved. This demonstrates that L4-CI-SA-RQ-267 is satisfied: " "'Data product management shall persist data products'") data_product_object = self.rrclient.read(dp_id) self.assertEquals(data_product_object.name,'DP1') self.assertEquals(data_product_object.description,'some new dp') log.debug("Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. " " Attributes in create for the data product obj, name= '%s', description='%s', match those of object from the " "resource registry, name='%s', desc='%s'" % (dp_obj.name, dp_obj.description,data_product_object.name, data_product_object.description)) #------------------------------------------------------------------------------------------------ # test suspend data product persistence #------------------------------------------------------------------------------------------------ self.dpsc_cli.suspend_data_product_persistence(dp_id) self.dpsc_cli.force_delete_data_product(dp_id) # now try to get the deleted dp object with self.assertRaises(NotFound): dp_obj = self.rrclient.read(dp_id)
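The persistence check above (resolve the dataset behind the product, then replay it) is worth factoring if more tests need it. A minimal sketch under the same setUp; the helper name _assert_persisted is hypothetical:

    def _assert_persisted(self, dp_id):
        # A data product only counts as persisted if it has a dataset
        # and the data retriever can replay that dataset as a Granule.
        dataset_ids, _ = self.rrclient.find_objects(
            subject=dp_id, predicate=PRED.hasDataset, id_only=True)
        self.assertTrue(dataset_ids, "Data Product %s has no dataset" % dp_id)
        replay_data = self.data_retriever.retrieve(dataset_ids[0])
        self.assertIsInstance(replay_data, Granule)
        return dataset_ids[0]

Returning the dataset id lets the caller go on to check per-stream persistence with ingestclient.is_persisted, as the test does inline.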
class TestCTDPChain(IonIntegrationTestCase): def setUp(self): super(TestCTDPChain, self).setUp() self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.pubsub = PubsubManagementServiceClient() self.process_dispatcher = ProcessDispatcherServiceClient() self.dataset_management = DatasetManagementServiceClient() self.data_process_management = DataProcessManagementServiceClient() self.dataproduct_management = DataProductManagementServiceClient() self.resource_registry = ResourceRegistryServiceClient() # This is for the time values inside the packets going into the transform self.i = 0 self.cnt = 0 # Cleanup of queue created by the subscriber self.queue_cleanup = [] self.data_process_cleanup = [] def _get_new_ctd_L0_packet(self, stream_definition_id, length): rdt = RecordDictionaryTool(stream_definition_id=stream_definition_id) rdt['time'] = numpy.arange(self.i, self.i+length) for field in rdt: if isinstance(rdt._pdict.get_context(field).param_type, QuantityType): rdt[field] = numpy.array([random.uniform(0.0,75.0) for i in xrange(length)]) g = rdt.to_granule() self.i+=length return g def clean_queues(self): for queue in self.queue_cleanup: xn = self.container.ex_manager.create_xn_queue(queue) xn.delete() def cleaning_operations(self): for dproc_id in self.data_process_cleanup: self.data_process_management.delete_data_process(dproc_id) def test_ctdp_chain(self): """ Test that packets are processed by a chain of CTDP transforms: L0, L1 and L2 """ #------------------------------------------------------------------------------------- # Prepare the stream def to be used for transform chain #------------------------------------------------------------------------------------- #todo Check whether the right parameter dictionary is being used self._prepare_stream_def_for_transform_chain() #------------------------------------------------------------------------------------- # Prepare the data proc defs and in and out data products for the transforms #------------------------------------------------------------------------------------- # list_args_L0 = [data_proc_def_id, input_dpod_id, output_dpod_id] list_args_L0 = self._prepare_things_you_need_to_launch_transform(name_of_transform='L0') list_args_L1 = self._prepare_things_you_need_to_launch_transform(name_of_transform='L1') list_args_L2_density = self._prepare_things_you_need_to_launch_transform(name_of_transform='L2_density') list_args_L2_salinity = self._prepare_things_you_need_to_launch_transform(name_of_transform='L2_salinity') log.debug("Got the following args: L0 = %s, L1 = %s, L2 density = %s, L2 salinity = %s", list_args_L0, list_args_L1, list_args_L2_density, list_args_L2_salinity ) #------------------------------------------------------------------------------------- # Launch the CTDP transforms #------------------------------------------------------------------------------------- L0_data_proc_id = self._launch_transform('L0', *list_args_L0) L1_data_proc_id = self._launch_transform('L1', *list_args_L1) L2_density_data_proc_id = self._launch_transform('L2_density', *list_args_L2_density) L2_salinity_data_proc_id = self._launch_transform('L2_salinity', *list_args_L2_salinity) log.debug("Launched the transforms: L0 = %s, L1 = %s", L0_data_proc_id, L1_data_proc_id) #------------------------------------------------------------------------- # Start a subscriber listening to the output of each of the transforms #------------------------------------------------------------------------- ar_L0 = 
self.start_subscriber_listening_to_L0_transform(out_data_prod_id=list_args_L0[2]) ar_L1 = self.start_subscriber_listening_to_L1_transform(out_data_prod_id=list_args_L1[2]) ar_L2_density = self.start_subscriber_listening_to_L2_density_transform(out_data_prod_id=list_args_L2_density[2]) ar_L2_salinity = self.start_subscriber_listening_to_L2_salinity_transform(out_data_prod_id=list_args_L2_salinity[2]) #------------------------------------------------------------------- # Publish the parsed packets that the L0 transform is listening for #------------------------------------------------------------------- stream_id, stream_route = self.get_stream_and_route_for_data_prod(data_prod_id= list_args_L0[1]) self._publish_for_L0_transform(stream_id, stream_route) #------------------------------------------------------------------- # Check the granules being output by the transforms #------------------------------------------------------------------- self._check_granule_from_L0_transform(ar_L0) self._check_granule_from_L1_transform(ar_L1) self._check_granule_from_L2_density_transform(ar_L2_density) self._check_granule_from_L2_salinity_transform(ar_L2_salinity) def _prepare_stream_def_for_transform_chain(self): # Get the stream definition for the stream using the parameter dictionary # pdict_id = self.dataset_management.read_parameter_dictionary_by_name(parameter_dict_name, id_only=True) pdict_id = self._create_input_param_dict_for_test(parameter_dict_name = 'input_param_for_L0') self.in_stream_def_id_for_L0 = self.pubsub.create_stream_definition(name='stream_def_for_L0', parameter_dictionary_id=pdict_id) self.addCleanup(self.pubsub.delete_stream_definition, self.in_stream_def_id_for_L0) pdict_id = self._create_input_param_dict_for_test(parameter_dict_name = 'params_for_other_transforms') self.stream_def_id = self.pubsub.create_stream_definition(name='stream_def_for_CTDBP_transforms', parameter_dictionary_id=pdict_id) self.addCleanup(self.pubsub.delete_stream_definition, self.stream_def_id) log.debug("Got the parsed parameter dictionary: id: %s", pdict_id) log.debug("Got the stream def for parsed input to L0: %s", self.in_stream_def_id_for_L0) log.debug("Got the stream def for the other streams: %s", self.stream_def_id) def _prepare_things_you_need_to_launch_transform(self, name_of_transform = ''): module, class_name = self._get_class_module(name_of_transform) #------------------------------------------------------------------------- # Data Process Definition #------------------------------------------------------------------------- dpd_obj = IonObject(RT.DataProcessDefinition, name= 'CTDBP_%s_Transform' % name_of_transform, description= 'Data Process Definition for the CTDBP %s transform.' 
% name_of_transform, module= module, class_name=class_name) data_proc_def_id = self.data_process_management.create_data_process_definition(dpd_obj) self.addCleanup(self.data_process_management.delete_data_process_definition, data_proc_def_id) log.debug("created data process definition: id = %s", data_proc_def_id) #------------------------------------------------------------------------- # Construct temporal and spatial Coordinate Reference System objects for the data product objects #------------------------------------------------------------------------- tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() #------------------------------------------------------------------------- # Get the input and output data products for the transform #------------------------------------------------------------------------- input_dpod_id = '' output_dpod_id = '' if name_of_transform == 'L0': input_dpod_id = self._create_input_data_product('parsed', tdom, sdom) output_dpod_id = self._create_output_data_product('L0', tdom, sdom) self.in_prod_for_L1 = output_dpod_id elif name_of_transform == 'L1': input_dpod_id = self.in_prod_for_L1 output_dpod_id = self._create_output_data_product('L1', tdom, sdom) self.in_prod_for_L2 = output_dpod_id elif name_of_transform == 'L2_density': input_dpod_id = self.in_prod_for_L2 output_dpod_id = self._create_output_data_product('L2_density', tdom, sdom) elif name_of_transform == 'L2_salinity': input_dpod_id = self.in_prod_for_L2 output_dpod_id = self._create_output_data_product('L2_salinity', tdom, sdom) else: self.fail("unexpected transform name: %s" % name_of_transform) return [data_proc_def_id, input_dpod_id, output_dpod_id] def _get_class_module(self, name_of_transform): options = {'L0' : self._class_module_L0, 'L1' : self._class_module_L1, 'L2_density' : self._class_module_L2_density, 'L2_salinity' : self._class_module_L2_salinity} return options[name_of_transform]() def _class_module_L0(self): module = 'ion.processes.data.transforms.ctdbp.ctdbp_L0' class_name = 'CTDBP_L0_all' return module, class_name def _class_module_L1(self): module = 'ion.processes.data.transforms.ctdbp.ctdbp_L1' class_name = 'CTDBP_L1_Transform' return module, class_name def _class_module_L2_density(self): module = 'ion.processes.data.transforms.ctdbp.ctdbp_L2_density' class_name = 'CTDBP_DensityTransform' return module, class_name def _class_module_L2_salinity(self): module = 'ion.processes.data.transforms.ctdbp.ctdbp_L2_salinity' class_name = 'CTDBP_SalinityTransform' return module, class_name def _create_input_data_product(self, name_of_transform = '', tdom = None, sdom = None): dpod_obj = IonObject(RT.DataProduct, name='dprod_%s' % name_of_transform, description='for_%s' % name_of_transform, temporal_domain = tdom, spatial_domain = sdom) log.debug("the stream def id: %s", self.stream_def_id) if name_of_transform == 'L0': stream_def_id = self.in_stream_def_id_for_L0 else: stream_def_id = self.stream_def_id dpod_id = self.dataproduct_management.create_data_product(data_product=dpod_obj, stream_definition_id= stream_def_id ) self.addCleanup(self.dataproduct_management.delete_data_product, dpod_id) log.debug("got the data product out. 
id: %s", dpod_id) return dpod_id def _create_output_data_product(self, name_of_transform = '', tdom = None, sdom = None): dpod_obj = IonObject(RT.DataProduct, name='dprod_%s' % name_of_transform, description='for_%s' % name_of_transform, temporal_domain = tdom, spatial_domain = sdom) if name_of_transform == 'L0': stream_def_id = self.in_stream_def_id_for_L0 else: stream_def_id = self.stream_def_id dpod_id = self.dataproduct_management.create_data_product(data_product=dpod_obj, stream_definition_id=stream_def_id ) self.addCleanup(self.dataproduct_management.delete_data_product, dpod_id) return dpod_id def _launch_transform(self, name_of_transform = '', data_proc_def_id = None, input_dpod_id = None, output_dpod_id = None): # We need the key name here to be "L2_stream", since when the data process is launched, this name goes into # the config as in config.process.publish_streams.L2_stream when the config is used to launch the data process if name_of_transform in ['L0', 'L1']: binding = '%s_stream' % name_of_transform elif name_of_transform == 'L2_salinity': binding = 'salinity' elif name_of_transform == 'L2_density': binding = 'density' config = None if name_of_transform == 'L1': config = self._create_calibration_coefficients_dict() elif name_of_transform == 'L2_density': config = DotDict() config.process = {'lat' : 32.7153, 'lon' : 117.1564} log.debug("launching transform for name: %s",name_of_transform ) log.debug("launching transform for data_proc_def_id: %s\ninput_dpod_id: %s\noutput_dpod_id: %s", data_proc_def_id, input_dpod_id, output_dpod_id ) data_proc_id = self.data_process_management.create_data_process( data_process_definition_id = data_proc_def_id, in_data_product_ids= [input_dpod_id], out_data_product_ids = [output_dpod_id], configuration = config) self.addCleanup(self.data_process_management.delete_data_process, data_proc_id) self.data_process_management.activate_data_process(data_proc_id) self.addCleanup(self.data_process_management.deactivate_data_process, data_proc_id) log.debug("Created a data process for ctdbp %s transform: id = %s", name_of_transform, data_proc_id) return data_proc_id def get_stream_and_route_for_data_prod(self, data_prod_id = ''): stream_ids, _ = self.resource_registry.find_objects(data_prod_id, PRED.hasStream, RT.Stream, True) stream_id = stream_ids[0] input_stream = self.resource_registry.read(stream_id) stream_route = input_stream.stream_route return stream_id, stream_route def start_subscriber_listening_to_L0_transform(self, out_data_prod_id = ''): #----------- Create subscribers to listen to the two transforms -------------------------------- stream_ids, _ = self.resource_registry.find_objects(out_data_prod_id, PRED.hasStream, RT.Stream, True) output_stream_id_of_transform = stream_ids[0] ar_L0 = self._start_subscriber_to_transform( name_of_transform = 'L0',stream_id=output_stream_id_of_transform) return ar_L0 def start_subscriber_listening_to_L1_transform(self, out_data_prod_id = ''): #----------- Create subscribers to listen to the two transforms -------------------------------- stream_ids, _ = self.resource_registry.find_objects(out_data_prod_id, PRED.hasStream, RT.Stream, True) output_stream_id_of_transform = stream_ids[0] ar_L1 = self._start_subscriber_to_transform( name_of_transform = 'L1',stream_id=output_stream_id_of_transform) return ar_L1 def start_subscriber_listening_to_L2_density_transform(self, out_data_prod_id = ''): #----------- Create subscribers to listen to the two transforms -------------------------------- stream_ids, _ = 
self.resource_registry.find_objects(out_data_prod_id, PRED.hasStream, RT.Stream, True) output_stream_id_of_transform = stream_ids[0] ar_L2_density = self._start_subscriber_to_transform( name_of_transform = 'L2_density', stream_id=output_stream_id_of_transform) return ar_L2_density def start_subscriber_listening_to_L2_salinity_transform(self, out_data_prod_id = ''): #----------- Create subscribers to listen to the two transforms -------------------------------- stream_ids, _ = self.resource_registry.find_objects(out_data_prod_id, PRED.hasStream, RT.Stream, True) output_stream_id_of_transform = stream_ids[0] ar_L2_salinity = self._start_subscriber_to_transform( name_of_transform = 'L2_salinity',stream_id=output_stream_id_of_transform) return ar_L2_salinity def _start_subscriber_to_transform(self, name_of_transform = '', stream_id = ''): ar = gevent.event.AsyncResult() def subscriber(m,r,s): ar.set(m) sub = StandaloneStreamSubscriber(exchange_name='sub_%s' % name_of_transform, callback=subscriber) # Note that running the line below creates the exchange, since no exchange with that name exists yet sub_id = self.pubsub.create_subscription('subscriber_to_transform_%s' % name_of_transform, stream_ids=[stream_id], exchange_name='sub_%s' % name_of_transform) self.addCleanup(self.pubsub.delete_subscription, sub_id) self.pubsub.activate_subscription(sub_id) self.addCleanup(self.pubsub.deactivate_subscription, sub_id) sub.start() self.addCleanup(sub.stop) return ar def _check_granule_from_L0_transform(self, ar = None): granule_from_transform = ar.get(timeout=20) log.debug("Got the following granule from the L0 transform: %s", granule_from_transform) # Check the algorithm being applied self._check_application_of_L0_algorithm(granule_from_transform) def _check_granule_from_L1_transform(self, ar = None): granule_from_transform = ar.get(timeout=20) log.debug("Got the following granule from the L1 transform: %s", granule_from_transform) # Check the algorithm being applied self._check_application_of_L1_algorithm(granule_from_transform) def _check_granule_from_L2_density_transform(self, ar = None): granule_from_transform = ar.get(timeout=20) log.debug("Got the following granule from the L2 density transform: %s", granule_from_transform) # Check the algorithm being applied self._check_application_of_L2_density_algorithm(granule_from_transform) def _check_granule_from_L2_salinity_transform(self, ar = None): granule_from_transform = ar.get(timeout=20) log.debug("Got the following granule from the L2 salinity transform: %s", granule_from_transform) # Check the algorithm being applied self._check_application_of_L2_salinity_algorithm(granule_from_transform) def 
_check_application_of_L2_salinity_algorithm(self, granule = None): """ Check the algorithm applied by the L2 transform """ rdt = RecordDictionaryTool.load_from_granule(granule) list_of_expected_keys = ['time', 'salinity'] for key in list_of_expected_keys: self.assertIn(key, rdt) def _publish_for_L0_transform(self, input_stream_id = None, stream_route = None): #----------- Publish on that stream so that the transform can receive it -------------------------------- self._publish_to_transform(input_stream_id, stream_route ) def _publish_to_transform(self, stream_id = '', stream_route = None): pub = StandaloneStreamPublisher(stream_id, stream_route) publish_granule = self._get_new_ctd_L0_packet(stream_definition_id=self.in_stream_def_id_for_L0, length = 5) pub.publish(publish_granule) log.debug("Published the following granule: %s", publish_granule) def _create_input_param_dict_for_test(self, parameter_dict_name = ''): pdict = ParameterDictionary() t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('float64'))) t_ctxt.axis = AxisTypeEnum.TIME t_ctxt.uom = 'seconds since 01-01-1900' pdict.add_context(t_ctxt) cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) cond_ctxt.uom = 'Siemens_per_meter' pdict.add_context(cond_ctxt) pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) pres_ctxt.uom = 'Pascal' pdict.add_context(pres_ctxt) if parameter_dict_name == 'input_param_for_L0': temp_ctxt = ParameterContext('temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) else: temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) temp_ctxt.uom = 'degree_kelvin' pdict.add_context(temp_ctxt) dens_ctxt = ParameterContext('density', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) dens_ctxt.uom = 'g/m' pdict.add_context(dens_ctxt) sal_ctxt = ParameterContext('salinity', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) sal_ctxt.uom = 'PSU' pdict.add_context(sal_ctxt) #create temp streamdef so the data product can create the stream pc_list = [] for pc_k, pc in pdict.iteritems(): ctxt_id = self.dataset_management.create_parameter_context(pc_k, pc[1].dump()) pc_list.append(ctxt_id) if parameter_dict_name == 'input_param_for_L0': self.addCleanup(self.dataset_management.delete_parameter_context,ctxt_id) elif pc[1].name == 'temp': self.addCleanup(self.dataset_management.delete_parameter_context,ctxt_id) pdict_id = self.dataset_management.create_parameter_dictionary(parameter_dict_name, pc_list) self.addCleanup(self.dataset_management.delete_parameter_dictionary, pdict_id) return pdict_id def _create_calibration_coefficients_dict(self): config = DotDict() config.process.calibration_coeffs = { 'temp_calibration_coeffs': { 'TA0' : 1.561342e-03, 'TA1' : 2.561486e-04, 'TA2' : 1.896537e-07, 'TA3' : 1.301189e-07, 'TOFFSET' : 0.000000e+00 }, 'cond_calibration_coeffs': { 'G' : -9.896568e-01, 'H' : 1.316599e-01, 'I' : -2.213854e-04, 'J' : 3.292199e-05, 'CPCOR' : -9.570000e-08, 'CTCOR' : 3.250000e-06, 'CSLOPE' : 1.000000e+00 }, 'pres_calibration_coeffs' : { 'PA0' : 4.960417e-02, 'PA1' : 4.883682e-04, 'PA2' : -5.687309e-12, 'PTCA0' : 5.249802e+05, 'PTCA1' : 7.595719e+00, 'PTCA2' : -1.322776e-01, 'PTCB0' : 2.503125e+01, 'PTCB1' : 5.000000e-05, 'PTCB2' : 0.000000e+00, 'PTEMPA0' : -6.431504e+01, 'PTEMPA1' : 5.168177e+01, 'PTEMPA2' : -2.847757e-01, 'POFFSET' : 0.000000e+00 } } return config
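To see the chain end to end, it helps to spell out what a single publish cycle looks like with the pieces above composed. A hedged sketch using only names defined in this class; list_args_L0 and ar_L2_salinity are locals of the test method above, so this is illustrative rather than standalone:

    # Resolve the stream feeding the L0 transform and publish one more granule.
    stream_id, stream_route = self.get_stream_and_route_for_data_prod(
        data_prod_id=list_args_L0[1])
    pub = StandaloneStreamPublisher(stream_id, stream_route)
    granule = self._get_new_ctd_L0_packet(
        stream_definition_id=self.in_stream_def_id_for_L0, length=5)
    pub.publish(granule)

    # Each subscriber's AsyncResult then yields the transformed granule,
    # e.g. for the end of the chain:
    rdt = RecordDictionaryTool.load_from_granule(ar_L2_salinity.get(timeout=20))
    self.assertIn('salinity', rdt)

The AsyncResult pattern means the assertion blocks only until the granule actually arrives, bounded by the timeout.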
class VisStreamLauncher(ImmediateProcess): """ This class emulates a stream source from a NetCDF file. It emits a record of data every few seconds on a stream identified by a routing key. """ def on_init(self): log.debug("VizStreamProducer init. Self.id=%s" % self.id) def on_start(self): log.debug("VizStreamProducer start") self.data_source_name = self.CFG.get_safe('name', 'sine_wave_generator') self.dataset = self.CFG.get_safe('dataset', 'sinusoidal') # create a pubsub client and a resource registry client self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient( node=self.container.node) # Dummy instrument related clients self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.dpclient = DataProductManagementServiceClient( node=self.container.node) self.ingestclient = IngestionManagementServiceClient( node=self.container.node) self.dataset_management = DatasetManagementServiceClient() # Additional code for creating a dummy instrument """ # Set up the preconditions. Look for an existing ingestion config while True: log.info("VisStreamLauncher:on_start: Waiting for an ingestion configuration to be available.") ingestion_cfgs, _ = self.rrclient.find_resources(RT.IngestionConfiguration, None, None, True) if len(ingestion_cfgs) > 0: break else: gevent.sleep(1) """ # Check to see if the data_product already exists in the system (e.g., when relaunching the code after a crash) dp_ids, _ = self.rrclient.find_resources(RT.DataProduct, None, self.data_source_name, True) if len(dp_ids) > 0: data_product_id = dp_ids[0] else: # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name=self.data_source_name, description=self.data_source_name) instModel_id = self.imsclient.create_instrument_model( instModel_obj) # Create InstrumentDevice instDevice_obj = IonObject(RT.InstrumentDevice, name=self.data_source_name, description=self.data_source_name, serial_number="12345") instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) # create a stream definition for the data from the ctd simulator pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubclient.create_stream_definition( name="SBE37_CDM", description="SBE37_CDM", parameter_dictionary_id=pdict_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name=self.data_source_name, description='Example ctd stream', temporal_domain=tdom, spatial_domain=sdom) data_product_id = self.dpclient.create_data_product( dp_obj, stream_definition_id=ctd_stream_def_id) self.damsclient.assign_data_product( input_resource_id=instDevice_id, data_product_id=data_product_id) self.dpclient.activate_data_product_persistence( data_product_id=data_product_id) print ">>>>>>>>>>>>> Dataproduct for sine wave generator : ", data_product_id # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id, PRED.hasStream, None, True) if self.dataset == 'sinusoidal': self.container.spawn_process( name='ctd_test.' 
+ self.data_source_name, module='ion.processes.data.sinusoidal_stream_publisher', cls='SinusoidalCtdPublisher', config={'process': { 'stream_id': stream_ids[0] }}) else: self.container.spawn_process( name='ctd_test.' + self.data_source_name, module='ion.processes.data.ctd_stream_publisher', cls='SimpleCtdPublisher', config={'process': { 'stream_id': stream_ids[0] }}) """ workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition, name='Realtime_HighCharts', id_only=True) if not len(workflow_def_ids): helper_create_highcharts_workflow_def(self.container) """ def on_quit(self): super(VisStreamLauncher, self).on_quit() log.debug("VizStreamProducer quit")
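The find-or-create logic in on_start is the part worth reusing: a relaunch after a crash should pick up the existing data product rather than build a second one. A compact sketch of just that pattern, using only calls already made above; the helper name is hypothetical:

    def _find_or_create_data_product(self, dp_obj, stream_def_id):
        # Reuse an existing product with the same name if one survives a crash.
        dp_ids, _ = self.rrclient.find_resources(RT.DataProduct, None, dp_obj.name, True)
        if dp_ids:
            return dp_ids[0]
        data_product_id = self.dpclient.create_data_product(
            dp_obj, stream_definition_id=stream_def_id)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id)
        return data_product_id

Keeping the lookup keyed on the product name matches the launcher's behavior; anything needing stronger identity would have to key on a registered id instead.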
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.dpsc_cli = DataProductManagementServiceClient() self.rrclient = ResourceRegistryServiceClient() self.damsclient = DataAcquisitionManagementServiceClient() self.pubsubcli = PubsubManagementServiceClient() self.ingestclient = IngestionManagementServiceClient() self.process_dispatcher = ProcessDispatcherServiceClient() self.dataset_management = DatasetManagementServiceClient() self.unsc = UserNotificationServiceClient() self.data_retriever = DataRetrieverServiceClient() self.identcli = IdentityManagementServiceClient() #------------------------------------------ # Create the environment #------------------------------------------ self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM') self.process_definitions = {} ingestion_worker_definition = ProcessDefinition(name='ingestion worker') ingestion_worker_definition.executable = { 'module':'ion.processes.data.ingestion.science_granule_ingestion_worker', 'class' :'ScienceGranuleIngestionWorker' } process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition) self.process_definitions['ingestion_worker'] = process_definition_id self.pids = [] self.exchange_points = [] self.exchange_names = [] #------------------------------------------------------------------------------------------------ # First launch the ingestors #------------------------------------------------------------------------------------------------ self.exchange_space = 'science_granule_ingestion' self.exchange_point = 'science_data' config = DotDict() config.process.datastore_name = 'datasets' config.process.queue_name = self.exchange_space self.exchange_names.append(self.exchange_space) self.exchange_points.append(self.exchange_point) pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config) log.debug("the ingestion worker process id: %s", pid) self.pids.append(pid) self.addCleanup(self.cleaning_up) def cleaning_up(self): for pid in self.pids: log.debug("number of pids to be terminated: %s", len(self.pids)) try: self.process_dispatcher.cancel_process(pid) log.debug("Terminated the process: %s", pid) except: log.debug("could not terminate the process id: %s" % pid) IngestionManagementIntTest.clean_subscriptions() for xn in self.exchange_names: xni = self.container.ex_manager.create_xn_queue(xn) xni.delete() for xp in self.exchange_points: xpi = self.container.ex_manager.create_xp(xp) xpi.delete() def get_datastore(self, dataset_id): dataset = self.dataset_management.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore @attr('EXT') @attr('PREP') def test_create_data_product(self): #------------------------------------------------------------------------------------------------ # create a stream definition for the data from the ctd simulator #------------------------------------------------------------------------------------------------ parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict') ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary._id) 
log.debug("Created stream def id %s" % ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test creating a new data product w/o a stream definition #------------------------------------------------------------------------------------------------ dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp') dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0 dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0 dp_obj.ooi_product_name = "PRODNAME" #------------------------------------------------------------------------------------------------ # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary #------------------------------------------------------------------------------------------------ dp_id = self.dpsc_cli.create_data_product( data_product= dp_obj, stream_definition_id=ctd_stream_def_id) # Assert that the data product has an associated stream at this stage stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True) self.assertNotEquals(len(stream_ids), 0) # Assert that the data product has an associated stream def at this stage stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True) self.assertNotEquals(len(stream_ids), 0) self.dpsc_cli.activate_data_product_persistence(dp_id) dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertIsNotNone(dp_obj) self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0) log.debug('Created data product %s', dp_obj) #------------------------------------------------------------------------------------------------ # test creating a new data product with a stream definition #------------------------------------------------------------------------------------------------ log.debug('Creating new data product with a stream definition') dp_obj = IonObject(RT.DataProduct, name='DP2', description='some new dp') dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id) self.dpsc_cli.activate_data_product_persistence(dp_id2) log.debug('new dp_id = %s' % dp_id2) #------------------------------------------------------------------------------------------------ #make sure data product is associated with stream def #------------------------------------------------------------------------------------------------ streamdefs = [] streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True) for s in streams: log.debug("Checking stream %s" % s) sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True) for sd in sdefs: log.debug("Checking streamdef %s" % sd) streamdefs.append(sd) self.assertIn(ctd_stream_def_id, streamdefs) group_names = self.dpsc_cli.get_data_product_group_list() self.assertIn("PRODNAME", group_names) #---------------------------------------------------------------------------------------- # Create users then notifications to this data product for each user #---------------------------------------------------------------------------------------- # user_1 user_1 = UserInfo() user_1.name = 'user_1' user_1.contact.email = '*****@*****.**' # user_2 user_2 = UserInfo() user_2.name = 'user_2' user_2.contact.email = '*****@*****.**' #user1 is a complete user self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger 
Unwin A254" actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject}) actor_id = self.identcli.create_actor_identity(actor_identity_obj) user_credentials_obj = IonObject("UserCredentials", {"name": self.subject}) self.identcli.register_user_credentials(actor_id, user_credentials_obj) user_id_1 = self.identcli.create_user_info(actor_id, user_1) user_id_2, _ = self.rrclient.create(user_2) delivery_config1a = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH) delivery_config1b = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH) notification_request_1 = NotificationRequest( name = "notification_1", origin=dp_id, origin_type="type_1", event_type=OT.ResourceLifecycleEvent, disabled_by_system = False, delivery_configurations=[delivery_config1a, delivery_config1b]) delivery_config2a = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH) delivery_config2b = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH) notification_request_2 = NotificationRequest( name = "notification_2", origin=dp_id, origin_type="type_2", disabled_by_system = False, event_type=OT.DetectionEvent, delivery_configurations=[delivery_config2a, delivery_config2b]) notification_request_1_id = self.unsc.create_notification(notification=notification_request_1, user_id=user_id_1) notification_request_2_id = self.unsc.create_notification(notification=notification_request_2, user_id=user_id_2) self.unsc.delete_notification(notification_request_1_id) # test reading a non-existent data product log.debug('reading non-existent data product') with self.assertRaises(NotFound): dp_obj = self.dpsc_cli.read_data_product('some_fake_id') # update a data product (tests read also) log.debug('Updating data product') # first get the existing dp object dp_obj = self.dpsc_cli.read_data_product(dp_id) # now tweak the object dp_obj.description = 'the very first dp' dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0 dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0 dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0 # now write the dp back to the registry update_result = self.dpsc_cli.update_data_product(dp_obj) # now get the dp back to see if it was updated dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertEquals(dp_obj.description,'the very first dp') self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0) log.debug('Updated data product %s', dp_obj) #test extension extended_product = self.dpsc_cli.get_data_product_extension(dp_id) #validate that there is one active and one retired user notification for this data product self.assertEqual(1, len(extended_product.computed.active_user_subscriptions.value)) self.assertEqual(1, len(extended_product.computed.past_user_subscriptions.value)) self.assertEqual(dp_id, extended_product._id) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_product.computed.product_download_size_estimated.status) self.assertEqual(0, extended_product.computed.product_download_size_estimated.value) self.assertEqual(ComputedValueAvailability.PROVIDED, extended_product.computed.parameters.status) #log.debug("test_create_data_product: parameters %s" % 
extended_product.computed.parameters.value) def ion_object_encoder(obj): return obj.__dict__ #test prepare for create data_product_data = self.dpsc_cli.prepare_data_product_support() #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2) self.assertEqual(data_product_data._id, "") self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport) self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2) self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0) self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 0) self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0) #test prepare for update data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id) #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2) self.assertEqual(data_product_data._id, dp_id) self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport) self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2) self.assertEqual(len(data_product_data.associations['Dataset'].resources), 1) self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1) self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, dp_id) self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 1) self.assertEqual(data_product_data.associations['Dataset'].associated_resources[0].s, dp_id) # now 'delete' the data product log.debug("deleting data product: %s" % dp_id) self.dpsc_cli.delete_data_product(dp_id) # Assert that there are no associated streams leftover after deleting the data product stream_ids, assoc_ids = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True) self.assertEquals(len(stream_ids), 0) self.assertEquals(len(assoc_ids), 0) self.dpsc_cli.force_delete_data_product(dp_id) # now try to get the deleted dp object with self.assertRaises(NotFound): dp_obj = self.dpsc_cli.read_data_product(dp_id) # Get the events corresponding to the data product ret = self.unsc.get_recent_events(resource_id=dp_id) events = ret.value for event in events: log.debug("event time: %s" % event.ts_created) self.assertTrue(len(events) > 0) def test_data_product_stream_def(self): pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp') dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj, stream_definition_id=ctd_stream_def_id) stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id) self.assertEquals(ctd_stream_def_id, stream_def_id) def test_derived_data_product(self): pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='ctd parsed', parameter_dictionary_id=pdict_id) self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id) dp = DataProduct(name='Instrument DP') dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id) self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id) self.dpsc_cli.activate_data_product_persistence(dp_id) self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, 
dp_id) dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True) if not dataset_ids: raise NotFound("Data Product %s dataset does not exist" % str(dp_id)) dataset_id = dataset_ids[0] # Make the derived data product simple_stream_def_id = self.pubsubcli.create_stream_definition(name='TEMPWAT stream def', parameter_dictionary_id=pdict_id, available_fields=['time','temp']) tempwat_dp = DataProduct(name='TEMPWAT', category=DataProductTypeEnum.DERIVED) tempwat_dp_id = self.dpsc_cli.create_data_product(tempwat_dp, stream_definition_id=simple_stream_def_id, parent_data_product_id=dp_id) self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id) # Check that the streams associated with the data product are persisted with stream_ids, _ = self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True) for stream_id in stream_ids: self.assertTrue(self.ingestclient.is_persisted(stream_id)) stream_id = stream_ids[0] route = self.pubsubcli.read_stream_route(stream_id=stream_id) rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id) rdt['time'] = np.arange(20) rdt['temp'] = np.arange(20) rdt['pressure'] = np.arange(20) publisher = StandaloneStreamPublisher(stream_id,route) dataset_modified = Event() def cb(*args, **kwargs): dataset_modified.set() es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True) es.start() self.addCleanup(es.stop) publisher.publish(rdt.to_granule()) self.assertTrue(dataset_modified.wait(30)) tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id, PRED.hasDataset, id_only=True) tempwat_dataset_id = tempwat_dataset_ids[0] granule = self.data_retriever.retrieve(tempwat_dataset_id, delivery_format=simple_stream_def_id) rdt = RecordDictionaryTool.load_from_granule(granule) np.testing.assert_array_equal(rdt['time'], np.arange(20)) self.assertEquals(set(rdt.fields), set(['time','temp'])) def test_activate_suspend_data_product(self): #------------------------------------------------------------------------------------------------ # create a stream definition for the data from the ctd simulator #------------------------------------------------------------------------------------------------ pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) log.debug("Created stream def id %s" % ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # test creating a new data product w/o a stream definition #------------------------------------------------------------------------------------------------ # Construct temporal and spatial Coordinate Reference System objects dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp') log.debug("Created an IonObject for a data product: %s" % dp_obj) #------------------------------------------------------------------------------------------------ # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary #------------------------------------------------------------------------------------------------ dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj, stream_definition_id=ctd_stream_def_id) #------------------------------------------------------------------------------------------------ # Subscribe to persist events 
#------------------------------------------------------------------------------------------------ queue = gevent.queue.Queue() def info_event_received(message, headers): queue.put(message) es = EventSubscriber(event_type=OT.InformationContentStatusEvent, callback=info_event_received, origin=dp_id, auto_delete=True) es.start() self.addCleanup(es.stop) #------------------------------------------------------------------------------------------------ # test activate and suspend data product persistence #------------------------------------------------------------------------------------------------ self.dpsc_cli.activate_data_product_persistence(dp_id) dp_obj = self.dpsc_cli.read_data_product(dp_id) self.assertIsNotNone(dp_obj) dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True) if not dataset_ids: raise NotFound("Data Product %s dataset does not exist" % str(dp_id)) dataset_id = dataset_ids[0] # Check that the streams associated with the data product are persisted stream_ids, _ = self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True) for stream_id in stream_ids: self.assertTrue(self.ingestclient.is_persisted(stream_id)) stream_id = stream_ids[0] route = self.pubsubcli.read_stream_route(stream_id=stream_id) rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id) rdt['time'] = np.arange(20) rdt['temp'] = np.arange(20) publisher = StandaloneStreamPublisher(stream_id,route) dataset_modified = Event() def cb(*args, **kwargs): dataset_modified.set() es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True) es.start() self.addCleanup(es.stop) publisher.publish(rdt.to_granule()) self.assertTrue(dataset_modified.wait(30)) #-------------------------------------------------------------------------------- # Now get the data in one chunk using an RPC Call to start_retrieve #-------------------------------------------------------------------------------- replay_data = self.data_retriever.retrieve(dataset_ids[0]) self.assertIsInstance(replay_data, Granule) log.debug("The data retriever was able to replay the dataset that was attached to the data product " "we wanted to be persisted. Therefore the data product was indeed persisted, " "otherwise we could not have retrieved its dataset using the data retriever. Therefore " "this demonstration shows that L4-CI-SA-RQ-267 is satisfied: 'Data product management shall persist data products'") data_product_object = self.rrclient.read(dp_id) self.assertEquals(data_product_object.name,'DP1') self.assertEquals(data_product_object.description,'some new dp') log.debug("Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. 
" " Attributes in create for the data product obj, name= '%s', description='%s', match those of object from the " "resource registry, name='%s', desc='%s'" % (dp_obj.name, dp_obj.description,data_product_object.name, data_product_object.description)) #------------------------------------------------------------------------------------------------ # test suspend data product persistence #------------------------------------------------------------------------------------------------ self.dpsc_cli.suspend_data_product_persistence(dp_id) dataset_modified.clear() rdt['time'] = np.arange(20,40) publisher.publish(rdt.to_granule()) self.assertFalse(dataset_modified.wait(2)) self.dpsc_cli.activate_data_product_persistence(dp_id) dataset_modified.clear() publisher.publish(rdt.to_granule()) self.assertTrue(dataset_modified.wait(30)) granule = self.data_retriever.retrieve(dataset_id) rdt = RecordDictionaryTool.load_from_granule(granule) np.testing.assert_array_almost_equal(rdt['time'], np.arange(40)) dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, id_only=True) self.assertEquals(len(dataset_ids), 1) self.dpsc_cli.suspend_data_product_persistence(dp_id) self.dpsc_cli.force_delete_data_product(dp_id) # now try to get the deleted dp object with self.assertRaises(NotFound): dp_obj = self.rrclient.read(dp_id) info_event_counter = 0 runtime = 0 starttime = time.time() caught_events = [] #check that the four InfoStatusEvents were received while info_event_counter < 4 and runtime < 60 : a = queue.get(timeout=60) caught_events.append(a) info_event_counter += 1 runtime = time.time() - starttime self.assertEquals(info_event_counter, 4)
class RegistrationProcessTest(IonIntegrationTestCase): def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.dataset_management = DatasetManagementServiceClient() self.data_product_management = DataProductManagementServiceClient() self.pubsub_management = PubsubManagementServiceClient() self.resource_registry = self.container.resource_registry @attr('LOCOINT') @unittest.skipIf(os.getenv( 'CEI_LAUNCH_TEST', False ), 'Host requires file-system access to coverage files, CEI mode does not support.' ) def test_get_dataset_to_xml(self): def init(self): super(RegistrationProcess, self).__init__() self.CFG = CFG RegistrationProcess.__init__ = init self.rp = RegistrationProcess() self.rp.on_start() dataset_id = self._make_dataset() coverage_path = DatasetManagementService()._get_coverage_path( dataset_id) cov = SimplexCoverage.load(coverage_path) xml_str = self.rp.get_dataset_xml(coverage_path) dom = parseString(xml_str) node = dom.getElementsByTagName('addAttributes') metadata = node[0] for n in metadata.childNodes: if n.nodeType != 3: if n.attributes["name"].value == "title": self.assertEquals(cov.name, n.childNodes[0].nodeValue) if n.attributes["name"].value == "institution": self.assertEquals('OOI', n.childNodes[0].nodeValue) if n.attributes["name"].value == "infoUrl": self.assertEquals(self.rp.pydap_url + cov.name, n.childNodes[0].nodeValue) parameters = [] node = dom.getElementsByTagName('sourceName') for n in node: if n.nodeType != 3: parameters.append(str(n.childNodes[0].nodeValue)) cov_params = [key for key in cov.list_parameters()] for p in parameters: self.assertIn(p, cov_params) cov.close() def _make_dataset(self): tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) dataset_id = self.dataset_management.create_dataset( 'test_dataset', parameter_dictionary_id=parameter_dict_id, spatial_domain=sdom, temporal_domain=tdom) return dataset_id def test_pydap(self): if not CFG.get_safe('bootstrap.use_pydap', False): raise unittest.SkipTest('PyDAP is off (bootstrap.use_pydap)') ph = ParameterHelper(self.dataset_management, self.addCleanup) pdict_id = ph.create_extended_parsed() stream_def_id = self.pubsub_management.create_stream_definition( 'example', parameter_dictionary_id=pdict_id) self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id) tdom, sdom = time_series_domain() dp = DataProduct(name='example') dp.spatial_domain = sdom.dump() dp.temporal_domain = tdom.dump() data_product_id = self.data_product_management.create_data_product( dp, stream_def_id) self.addCleanup(self.data_product_management.delete_data_product, data_product_id) self.data_product_management.activate_data_product_persistence( data_product_id) self.addCleanup( self.data_product_management.suspend_data_product_persistence, data_product_id) dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0] monitor = DatasetMonitor(dataset_id) self.addCleanup(monitor.stop) rdt = ph.get_rdt(stream_def_id) ph.fill_rdt(rdt, 10) ph.publish_rdt_to_data_product(data_product_id, rdt) self.assertTrue(monitor.event.wait(10)) gevent.sleep( 1) # Yield to other greenlets, had an issue with connectivity pydap_host = CFG.get_safe('server.pydap.host', 'localhost') pydap_port = CFG.get_safe('server.pydap.port', 8001) url = 'http://%s:%s/%s' % (pydap_host, pydap_port, dataset_id) for i in xrange( 
3 ): # Do it three times to test that the cache doesn't corrupt the requests/responses ds = open_url(url) np.testing.assert_array_equal(ds['time'][:], np.arange(10)) untested = [] for k, v in rdt.iteritems(): if k == rdt.temporal_parameter: continue context = rdt.context(k) if isinstance(context.param_type, QuantityType): np.testing.assert_array_equal(ds[k][k][:][0], rdt[k]) elif isinstance(context.param_type, ArrayType): if context.param_type.inner_encoding is None: values = np.empty(rdt[k].shape, dtype='O') for i, obj in enumerate(rdt[k]): values[i] = str(obj) np.testing.assert_array_equal(ds[k][k][:][0], values) elif len(rdt[k].shape) > 1: values = np.empty(rdt[k].shape[0], dtype='O') for i in xrange(rdt[k].shape[0]): values[i] = ','.join( map(lambda x: str(x), rdt[k][i].tolist())) np.testing.assert_array_equal(ds[k][k][:][0], values) elif isinstance(context.param_type, ConstantType): np.testing.assert_array_equal(ds[k][k][:][0], rdt[k]) elif isinstance(context.param_type, CategoryType): np.testing.assert_array_equal(ds[k][k][:][0], rdt[k]) else: untested.append('%s (%s)' % (k, context.param_type)) if untested: raise AssertionError('Untested parameters: %s' % untested)
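# Sketch of the access pattern the pydap assertions above use: each parameter
# comes back as a structure, so the variable lives at ds[k][k]; [:] fetches
# the data and [0] selects the first (only) row. Hypothetical helper,
# assuming a dataset object returned by open_url as in test_pydap.
def dap_values(ds, key):
    # ds[key] is the structure, ds[key][key] the variable inside it
    return ds[key][key][:][0]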
class TestDriverEgg(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() #setup listerner vars self._data_greenlets = [] self._no_samples = None self._samples_received = [] self.event_publisher = EventPublisher() def get_streamConfigs(self): raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict') parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict') return raw_config, parsed_config ########################## # # The following tests generate different agent configs and pass them to a common base test script # ########################### @unittest.skip("this test can't be run from coi services. it is missing dependencies") def test_driverLaunchModuleNoURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver", stream_configurations = [raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj) def test_driverLaunchModuleWithURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver", driver_uri=DRV_URI_GOOD, stream_configurations = [raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj) def test_driverLaunchNoModuleOnlyURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", #driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", #driver_class="SBE37Driver", driver_uri=DRV_URI_GOOD, stream_configurations = [raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj) def test_driverLaunchBogusModuleWithURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="bogus", driver_class="Bogus", driver_uri=DRV_URI_GOOD, stream_configurations = [raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj) @unittest.skip("Launches an egg 'process' even though the egg download should produce error 404") def test_driverLaunchNoModule404URI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', 
description="SBE37IMAgent", #driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", #driver_class="SBE37Driver", driver_uri=DRV_URI_404, stream_configurations = [raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj, False) def test_driverLaunchNoModuleBadEggURI(self): raw_config, parsed_config = self.get_streamConfigs() instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", #driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", #driver_class="SBE37Driver", driver_uri=DRV_URI_BAD, stream_configurations = [raw_config, parsed_config]) self.base_activateInstrumentSample(instAgent_obj, True, False) def base_activateInstrumentSample(self, instAgent_obj, expect_launch=True, expect_command=True): """ This method runs a test of launching a driver with a given agent configuration """ # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") instModel_id = self.imsclient.create_instrument_model(instModel_obj) print 'new InstrumentModel id = %s ' % instModel_id # Create InstrumentAgent instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) print 'new InstrumentAgent id = %s' % instAgent_id self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # Create InstrumentDevice instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" ) instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id) port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config = port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id) parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True) parsed_stream_def_id = self.pubsubcli.create_stream_definition(name='parsed', parameter_dictionary_id=parsed_pdict_id) raw_stream_def_id = self.pubsubcli.create_stream_definition(name='raw', parameter_dictionary_id=raw_pdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test') data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) print 'new dp_id = %s' % data_product_id1 self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True) print 'Data product streams1 = %s' % stream_ids # Retrieve the id of the OUTPUT 
stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True) print 'Data set for data_product_id1 = %s' % dataset_ids[0] self.parsed_dataset = dataset_ids[0] dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test') data_product_id2 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) print 'new dp_id = %s' % str(data_product_id2) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2) self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True) print 'Data product streams2 = %s' % str(stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasDataset, RT.Dataset, True) print 'Data set for data_product_id2 = %s' % dataset_ids[0] self.raw_dataset = dataset_ids[0] # add start/stop for instrument agent gevent.joinall([gevent.spawn(lambda: self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id))]) self.addCleanup(self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=instAgentInstance_id) #wait for start inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(instAgentInstance_id) agent_process_id = ResourceAgentClient._get_agent_process_id(instDevice_id) print "Agent process id is '%s'" % str(agent_process_id) self.assertTrue(agent_process_id) gate = ProcessStateGate(self.processdispatchclient.read_process, agent_process_id, ProcessStateEnum.RUNNING) if not expect_launch: self.assertFalse(gate.await(30), "The instance (%s) of bogus instrument agent spawned in 30 seconds ?!?" % agent_process_id) return self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" % agent_process_id) print "Instrument Agent Instance successfully triggered ProcessStateGate as RUNNING" #print 'Instrument agent instance obj: = %s' % str(inst_agent_instance_obj) # Start a resource agent client to talk with the instrument agent. 
self._ia_client = ResourceAgentClient(instDevice_id, to_name=agent_process_id, process=FakeProcess()) print "ResourceAgentClient created: %s" % str(self._ia_client) print "Sending command=ResourceAgentEvent.INITIALIZE" cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) if not expect_command: self.assertRaises(ServerError, self._ia_client.execute_agent, cmd) return retval = self._ia_client.execute_agent(cmd) print "Result of INITIALIZE: %s" % str(retval) state = self._ia_client.get_agent_state() self.assertEqual(state, ResourceAgentState.INACTIVE) cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) reply = self._ia_client.execute_agent(cmd) self.assertTrue(reply.status == 0) cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE) retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, 'DRIVER_STATE_COMMAND') cmd = AgentCommand(command=ResourceAgentEvent.RUN) reply = self._ia_client.execute_agent(cmd) self.assertTrue(reply.status == 0) cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE) retval = self._ia_client.execute_resource(cmd) # This gevent sleep is there to test the autosample time, which will show something different from default # only if the instrument runs for over a minute gevent.sleep(90) extended_instrument = self.imsclient.get_instrument_device_extension(instrument_device_id=instDevice_id) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) autosample_string = extended_instrument.computed.uptime.value autosampling_time = int(autosample_string.split()[4]) self.assertTrue(autosampling_time > 0) cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE) retval = self._ia_client.execute_resource(cmd) print "Sending command=ResourceAgentEvent.RESET" cmd = AgentCommand(command=ResourceAgentEvent.RESET) reply = self._ia_client.execute_agent(cmd) print "Result of RESET: %s" % str(reply)
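# The INITIALIZE -> GO_ACTIVE -> RUN ladder above is how these tests bring an
# agent to COMMAND state. A compact illustrative helper (not part of the
# suite; it reuses the AgentCommand / ResourceAgentEvent names imported here):
def drive_agent_to_command(ia_client):
    for event in (ResourceAgentEvent.INITIALIZE,
                  ResourceAgentEvent.GO_ACTIVE,
                  ResourceAgentEvent.RUN):
        ia_client.execute_agent(AgentCommand(command=event))
    # callers can assert this equals ResourceAgentState.COMMAND
    return ia_client.get_agent_state()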
class TestActivateInstrumentIntegration(IonIntegrationTestCase): def setUp(self): # Start container super(TestActivateInstrumentIntegration, self).setUp() config = DotDict() config.bootstrap.use_es = True self._start_container() self.addCleanup(TestActivateInstrumentIntegration.es_cleanup) self.container.start_rel_from_url('res/deploy/r2deploy.yml', config) # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubcli = PubsubManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dpclient = DataProductManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataretrieverclient = DataRetrieverServiceClient( node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.usernotificationclient = UserNotificationServiceClient() #setup listerner vars self._data_greenlets = [] self._no_samples = None self._samples_received = [] self.event_publisher = EventPublisher() @staticmethod def es_cleanup(): es_host = CFG.get_safe('server.elasticsearch.host', 'localhost') es_port = CFG.get_safe('server.elasticsearch.port', '9200') es = ep.ElasticSearch(host=es_host, port=es_port, timeout=10) indexes = STD_INDEXES.keys() indexes.append('%s_resources_index' % get_sys_name().lower()) indexes.append('%s_events_index' % get_sys_name().lower()) for index in indexes: IndexManagementService._es_call(es.river_couchdb_delete, index) IndexManagementService._es_call(es.index_delete, index) def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name + '_logger') producer_definition.executable = { 'module': 'ion.processes.data.stream_granule_logger', 'class': 'StreamGranuleLogger' } logger_procdef_id = self.processdispatchclient.create_process_definition( process_definition=producer_definition) configuration = { 'process': { 'stream_id': stream_id, } } pid = self.processdispatchclient.schedule_process( process_definition_id=logger_procdef_id, configuration=configuration) return pid def _create_notification(self, user_name='', instrument_id='', product_id=''): #-------------------------------------------------------------------------------------- # Make notification request objects #-------------------------------------------------------------------------------------- notification_request_1 = NotificationRequest( name='notification_1', origin=instrument_id, origin_type="instrument", event_type='ResourceLifecycleEvent') notification_request_2 = NotificationRequest( name='notification_2', origin=product_id, origin_type="data product", event_type='DetectionEvent') #-------------------------------------------------------------------------------------- # Create a user and get the user_id #-------------------------------------------------------------------------------------- user = UserInfo() user.name = user_name user.contact.email = '*****@*****.**' % user_name user_id, _ = self.rrclient.create(user) 
#-------------------------------------------------------------------------------------- # Create notification #-------------------------------------------------------------------------------------- self.usernotificationclient.create_notification( notification=notification_request_1, user_id=user_id) self.usernotificationclient.create_notification( notification=notification_request_2, user_id=user_id) log.debug( "test_activateInstrumentSample: create_user_notifications user_id %s", str(user_id)) return user_id def get_datastore(self, dataset_id): dataset = self.datasetclient.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore( datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def _check_computed_attributes_of_extended_instrument( self, expected_instrument_device_id='', extended_instrument=None): # Verify that computed attributes exist for the extended instrument self.assertIsInstance( extended_instrument.computed.last_data_received_datetime, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) self.assertIsInstance( extended_instrument.computed.power_status_roll_up, ComputedIntValue) self.assertIsInstance( extended_instrument.computed.communications_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue) self.assertIsInstance( extended_instrument.computed.location_status_roll_up, ComputedIntValue) # the following assert will not work without elasticsearch. #self.assertEqual( 1, len(extended_instrument.computed.user_notification_requests.value) ) # Verify the computed attribute for user notification requests self.assertEqual( 1, len(extended_instrument.computed.user_notification_requests.value)) notifications = extended_instrument.computed.user_notification_requests.value notification = notifications[0] self.assertEqual(expected_instrument_device_id, notification.origin) self.assertEqual("instrument", notification.origin_type) self.assertEqual('ResourceLifecycleEvent', notification.event_type) def _check_computed_attributes_of_extended_product( self, expected_data_product_id='', extended_data_product=None): self.assertEqual(expected_data_product_id, extended_data_product._id) log.debug("extended_data_product.computed: %s", extended_data_product.computed) # Verify that computed attributes exist for the extended instrument self.assertIsInstance( extended_data_product.computed.product_download_size_estimated, ComputedFloatValue) self.assertIsInstance( extended_data_product.computed.number_active_subscriptions, ComputedIntValue) self.assertIsInstance(extended_data_product.computed.data_url, ComputedStringValue) self.assertIsInstance(extended_data_product.computed.stored_data_size, ComputedIntValue) self.assertIsInstance(extended_data_product.computed.recent_granules, ComputedDictValue) self.assertIsInstance(extended_data_product.computed.parameters, ComputedListValue) self.assertIsInstance(extended_data_product.computed.recent_events, ComputedEventListValue) self.assertIsInstance(extended_data_product.computed.provenance, ComputedDictValue) self.assertIsInstance( extended_data_product.computed.user_notification_requests, ComputedListValue) self.assertIsInstance( extended_data_product.computed.active_user_subscriptions, ComputedListValue) self.assertIsInstance( extended_data_product.computed.past_user_subscriptions, ComputedListValue) self.assertIsInstance(extended_data_product.computed.last_granule, 
ComputedDictValue) self.assertIsInstance(extended_data_product.computed.is_persisted, ComputedIntValue) self.assertIsInstance( extended_data_product.computed.data_contents_updated, ComputedStringValue) self.assertIsInstance(extended_data_product.computed.data_datetime, ComputedListValue) # exact text here keeps changing to fit UI capabilities. keep assertion general... self.assertIn( 'ok', extended_data_product.computed.last_granule.value['quality_flag']) self.assertEqual( 2, len(extended_data_product.computed.data_datetime.value)) notifications = extended_data_product.computed.user_notification_requests.value notification = notifications[0] self.assertEqual(expected_data_product_id, notification.origin) self.assertEqual("data product", notification.origin_type) self.assertEqual('DetectionEvent', notification.event_type) @attr('LOCOINT') #@unittest.skip('refactoring') @unittest.skipIf(not use_es, 'No ElasticSearch') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode') @patch.dict(CFG, {'endpoint': {'receive': {'timeout': 90}}}) def test_activateInstrumentSample(self): self.loggerpids = [] # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") instModel_id = self.imsclient.create_instrument_model(instModel_obj) log.debug('new InstrumentModel id = %s ', instModel_id) raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw') parsed_config = StreamConfiguration( stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict') # Create InstrumentAgent instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations=[raw_config, parsed_config]) instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) log.debug('new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent( instModel_id, instAgent_id) # Create InstrumentDevice log.debug( 'test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ' ) instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345") instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) log.debug( "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config=port_agent_config, alerts=[]) instAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, instDevice_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubcli.create_stream_definition( name='parsed', parameter_dictionary_id=parsed_pdict_id) raw_pdict_id = 
self.dataset_management.read_parameter_dictionary_by_name( 'raw', id_only=True) raw_stream_def_id = self.pubsubcli.create_stream_definition( name='raw', parameter_dictionary_id=raw_pdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain=tdom, spatial_domain=sdom) data_product_id1 = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug('new dp_id = %s', data_product_id1) self.dpclient.activate_data_product_persistence( data_product_id=data_product_id1) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True) log.debug('Data product streams1 = %s', stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True) log.debug('Data set for data_product_id1 = %s', dataset_ids[0]) self.parsed_dataset = dataset_ids[0] pid = self.create_logger('ctd_parsed', stream_ids[0]) self.loggerpids.append(pid) dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test', temporal_domain=tdom, spatial_domain=sdom) data_product_id2 = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=raw_stream_def_id) log.debug('new dp_id = %s', data_product_id2) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2) self.dpclient.activate_data_product_persistence( data_product_id=data_product_id2) # setup notifications for the device and parsed data product user_id_1 = self._create_notification(user_name='user_1', instrument_id=instDevice_id, product_id=data_product_id1) #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users --------- user_id_2 = self._create_notification(user_name='user_2', instrument_id=instDevice_id, product_id=data_product_id2) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True) log.debug('Data product streams2 = %s', str(stream_ids)) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasDataset, RT.Dataset, True) log.debug('Data set for data_product_id2 = %s', dataset_ids[0]) self.raw_dataset = dataset_ids[0] #elastic search debug es_indexes, _ = self.container.resource_registry.find_resources( restype='ElasticSearchIndex') log.debug('ElasticSearch indexes: %s', [i.name for i in es_indexes]) log.debug('Bootstrap %s', CFG.bootstrap.use_es) def start_instrument_agent(): self.imsclient.start_instrument_agent_instance( instrument_agent_instance_id=instAgentInstance_id) gevent.joinall([gevent.spawn(start_instrument_agent)]) #cleanup self.addCleanup(self.imsclient.stop_instrument_agent_instance, instrument_agent_instance_id=instAgentInstance_id) #wait for start inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance( instAgentInstance_id) gate = AgentProcessStateGate(self.processdispatchclient.read_process, instDevice_id, ProcessStateEnum.RUNNING) self.assertTrue( gate. 
await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" % gate.process_id) #log.trace('Instrument agent instance obj: = %s' , str(inst_agent_instance_obj)) # Start a resource agent client to talk with the instrument agent. self._ia_client = ResourceAgentClient(instDevice_id, to_name=gate.process_id, process=FakeProcess()) log.debug("test_activateInstrumentSample: got ia client %s", str(self._ia_client)) cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE) retval = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: initialize %s", str(retval)) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.INACTIVE, state) log.debug("(L4-CI-SA-RQ-334): Sending go_active command ") cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrument: return value from go_active %s", str(reply)) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.IDLE, state) cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE) retval = self._ia_client.execute_agent(cmd) state = retval.result log.debug( "(L4-CI-SA-RQ-334): current state after sending go_active command %s", str(state)) cmd = AgentCommand(command=ResourceAgentEvent.RUN) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: run %s", str(reply)) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.COMMAND, state) cmd = AgentCommand(command=ResourceAgentEvent.PAUSE) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.STOPPED, state) cmd = AgentCommand(command=ResourceAgentEvent.RESUME) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.COMMAND, state) cmd = AgentCommand(command=ResourceAgentEvent.CLEAR) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.IDLE, state) cmd = AgentCommand(command=ResourceAgentEvent.RUN) retval = self._ia_client.execute_agent(cmd) state = self._ia_client.get_agent_state() self.assertEqual(ResourceAgentState.COMMAND, state) cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE) for i in xrange(10): retval = self._ia_client.execute_resource(cmd) log.debug("test_activateInstrumentSample: return from sample %s", str(retval)) log.debug("test_activateInstrumentSample: calling reset ") cmd = AgentCommand(command=ResourceAgentEvent.RESET) reply = self._ia_client.execute_agent(cmd) log.debug("test_activateInstrumentSample: return from reset %s", str(reply)) #-------------------------------------------------------------------------------- # Now get the data in one chunk using an RPC Call to start_retrieve #-------------------------------------------------------------------------------- replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset) self.assertIsInstance(replay_data_raw, Granule) rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw) log.debug("RDT raw: %s", str(rdt_raw.pretty_print())) self.assertIn('raw', rdt_raw) raw_vals = rdt_raw['raw'] all_raw = "".join(raw_vals) # look for 't' entered after a prompt -- ">t" t_commands = all_raw.count(">t") if 10 != t_commands: log.error("%s raw_vals: ", len(raw_vals)) for i, r in enumerate(raw_vals): log.error("raw val %s: %s", i, [r]) self.fail("Expected 10 't' strings in raw_vals, got %s" % t_commands) else: 
log.debug("%s raw_vals: ", len(raw_vals)) for i, r in enumerate(raw_vals): log.debug("raw val %s: %s", i, [r]) replay_data_parsed = self.dataretrieverclient.retrieve( self.parsed_dataset) self.assertIsInstance(replay_data_parsed, Granule) rdt_parsed = RecordDictionaryTool.load_from_granule(replay_data_parsed) log.debug("test_activateInstrumentSample: RDT parsed: %s", str(rdt_parsed.pretty_print())) self.assertIn('temp', rdt_parsed) temp_vals = rdt_parsed['temp'] pressure_vals = rdt_parsed['pressure'] if 10 != len(temp_vals): log.error("%s temp_vals: %s", len(temp_vals), temp_vals) self.fail("Expected 10 temp_vals, got %s" % len(temp_vals)) log.debug("l4-ci-sa-rq-138") """ Physical resource control shall be subject to policy Instrument management control capabilities shall be subject to policy The actor accessing the control capabilities must be authorized to send commands. note from maurice 2012-05-18: Talk to tim M to verify that this is policy. If it is then talk with Stephen to get an example of a policy test and use that to create a test stub that will be completed when we have instrument policies. Tim M: The "actor", aka observatory operator, will access the instrument through ION. """ #-------------------------------------------------------------------------------- # Get the extended data product to see if it contains the granules #-------------------------------------------------------------------------------- extended_product = self.dpclient.get_data_product_extension( data_product_id=data_product_id1, user_id=user_id_1) def poller(extended_product): return len(extended_product.computed.user_notification_requests. value) == 1 poll(poller, extended_product, timeout=30) self._check_computed_attributes_of_extended_product( expected_data_product_id=data_product_id1, extended_data_product=extended_product) #-------------------------------------------------------------------------------- # Get the extended instrument #-------------------------------------------------------------------------------- extended_instrument = self.imsclient.get_instrument_device_extension( instrument_device_id=instDevice_id, user_id=user_id_1) #-------------------------------------------------------------------------------- # For the second user, check the extended data product and the extended intrument #-------------------------------------------------------------------------------- extended_product = self.dpclient.get_data_product_extension( data_product_id=data_product_id2, user_id=user_id_2) self._check_computed_attributes_of_extended_product( expected_data_product_id=data_product_id2, extended_data_product=extended_product) #-------------------------------------------------------------------------------- # Get the extended instrument #-------------------------------------------------------------------------------- extended_instrument = self.imsclient.get_instrument_device_extension( instrument_device_id=instDevice_id, user_id=user_id_2) self._check_computed_attributes_of_extended_instrument( expected_instrument_device_id=instDevice_id, extended_instrument=extended_instrument) #-------------------------------------------------------------------------------- # Deactivate loggers #-------------------------------------------------------------------------------- for pid in self.loggerpids: self.processdispatchclient.cancel_process(pid) self.dpclient.delete_data_product(data_product_id1) self.dpclient.delete_data_product(data_product_id2)
class CtdbpTransformsIntTest(IonIntegrationTestCase): def setUp(self): super(CtdbpTransformsIntTest, self).setUp() self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.pubsub = PubsubManagementServiceClient() self.process_dispatcher = ProcessDispatcherServiceClient() self.dataset_management = DatasetManagementServiceClient() self.data_process_management = DataProcessManagementServiceClient() self.dataproduct_management = DataProductManagementServiceClient() self.resource_registry = ResourceRegistryServiceClient() # This is for the time values inside the packets going into the transform self.i = 0 # Cleanup of queue created by the subscriber def _get_new_ctd_packet(self, stream_definition_id, length): rdt = RecordDictionaryTool(stream_definition_id=stream_definition_id) rdt['time'] = numpy.arange(self.i, self.i + length) for field in rdt: if isinstance( rdt._pdict.get_context(field).param_type, QuantityType): rdt[field] = numpy.array( [random.uniform(0.0, 75.0) for i in xrange(length)]) g = rdt.to_granule() self.i += length return g def _create_input_param_dict_for_test(self, parameter_dict_name=''): pdict = ParameterDictionary() t_ctxt = ParameterContext( 'time', param_type=QuantityType(value_encoding=numpy.dtype('float64'))) t_ctxt.axis = AxisTypeEnum.TIME t_ctxt.uom = 'seconds since 01-01-1900' pdict.add_context(t_ctxt) cond_ctxt = ParameterContext( 'conductivity', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) cond_ctxt.uom = '' pdict.add_context(cond_ctxt) pres_ctxt = ParameterContext( 'pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) pres_ctxt.uom = '' pdict.add_context(pres_ctxt) temp_ctxt = ParameterContext( 'temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) temp_ctxt.uom = '' pdict.add_context(temp_ctxt) dens_ctxt = ParameterContext( 'density', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) dens_ctxt.uom = '' pdict.add_context(dens_ctxt) sal_ctxt = ParameterContext( 'salinity', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) sal_ctxt.uom = '' pdict.add_context(sal_ctxt) #create temp streamdef so the data product can create the stream pc_list = [] for pc_k, pc in pdict.iteritems(): ctxt_id = self.dataset_management.create_parameter_context( pc_k, pc[1].dump()) pc_list.append(ctxt_id) self.addCleanup(self.dataset_management.delete_parameter_context, ctxt_id) pdict_id = self.dataset_management.create_parameter_dictionary( parameter_dict_name, pc_list) self.addCleanup(self.dataset_management.delete_parameter_dictionary, pdict_id) return pdict_id def test_ctdbp_L0_all(self): """ Test packets processed by the ctdbp_L0_all transform """ #----------- Data Process Definition -------------------------------- dpd_obj = IonObject( RT.DataProcessDefinition, name='CTDBP_L0_all', description= 'Take parsed stream and put the C, T and P into three separate L0 streams.', module='ion.processes.data.transforms.ctdbp.ctdbp_L0', class_name='CTDBP_L0_all') dprocdef_id = self.data_process_management.create_data_process_definition( dpd_obj) self.addCleanup( self.data_process_management.delete_data_process_definition, dprocdef_id) log.debug("created data process definition: id = %s", dprocdef_id) #----------- Data Products -------------------------------- # Construct temporal and spatial Coordinate Reference System objects tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() input_param_dict = self._create_input_param_dict_for_test( 
parameter_dict_name='fictitious_ctdp_param_dict') # Get the stream definition for the stream using the parameter dictionary # input_param_dict = self.dataset_management.read_parameter_dictionary_by_name('ctdbp_cdef_sample', id_only=True) input_stream_def_dict = self.pubsub.create_stream_definition( name='parsed', parameter_dictionary_id=input_param_dict) self.addCleanup(self.pubsub.delete_stream_definition, input_stream_def_dict) log.debug("Got the parsed parameter dictionary: id: %s", input_param_dict) log.debug("Got the stream def for parsed input: %s", input_stream_def_dict) # Input data product parsed_stream_dp_obj = IonObject( RT.DataProduct, name='parsed_stream', description='Parsed stream input to CTDBP L0 transform', temporal_domain=tdom, spatial_domain=sdom) input_dp_id = self.dataproduct_management.create_data_product( data_product=parsed_stream_dp_obj, stream_definition_id=input_stream_def_dict) self.addCleanup(self.dataproduct_management.delete_data_product, input_dp_id) # output data product L0_stream_dp_obj = IonObject( RT.DataProduct, name='L0_stream', description='L0_stream output of CTDBP L0 transform', temporal_domain=tdom, spatial_domain=sdom) L0_stream_dp_id = self.dataproduct_management.create_data_product( data_product=L0_stream_dp_obj, stream_definition_id=input_stream_def_dict) self.addCleanup(self.dataproduct_management.delete_data_product, L0_stream_dp_id) # We need the key name here to be "L0_stream", since when the data process is launched, this name goes into # the config as in config.process.publish_streams.L0_stream when the config is used to launch the data process out_stream_ids, _ = self.resource_registry.find_objects( L0_stream_dp_id, PRED.hasStream, RT.Stream, True) self.assertTrue(len(out_stream_ids)) output_stream_id = out_stream_ids[0] dproc_id = self.data_process_management.create_data_process( data_process_definition_id=dprocdef_id, in_data_product_ids=[input_dp_id], out_data_product_ids=[L0_stream_dp_id], configuration=None) self.addCleanup(self.data_process_management.delete_data_process, dproc_id) log.debug("Created a data process for ctdbp_L0. 
id: %s", dproc_id) # Activate the data process self.data_process_management.activate_data_process(dproc_id) self.addCleanup(self.data_process_management.deactivate_data_process, dproc_id) #----------- Find the stream that is associated with the input data product when it was created by create_data_product() -------------------------------- stream_ids, _ = self.resource_registry.find_objects( input_dp_id, PRED.hasStream, RT.Stream, True) self.assertTrue(len(stream_ids)) input_stream_id = stream_ids[0] stream_route = self.pubsub.read_stream_route(input_stream_id) log.debug("The input stream for the L0 transform: %s", input_stream_id) #----------- Create a subscriber that will listen to the transform's output -------------------------------- ar = gevent.event.AsyncResult() def subscriber(m, r, s): ar.set(m) sub = StandaloneStreamSubscriber(exchange_name='sub', callback=subscriber) sub_id = self.pubsub.create_subscription('subscriber_to_transform', stream_ids=[output_stream_id], exchange_name='sub') self.addCleanup(self.pubsub.delete_subscription, sub_id) self.pubsub.activate_subscription(sub_id) self.addCleanup(self.pubsub.deactivate_subscription, sub_id) sub.start() self.addCleanup(sub.stop) #----------- Publish on that stream so that the transform can receive it -------------------------------- pub = StandaloneStreamPublisher(input_stream_id, stream_route) publish_granule = self._get_new_ctd_packet( stream_definition_id=input_stream_def_dict, length=5) pub.publish(publish_granule) log.debug("Published the following granule: %s", publish_granule) granule_from_transform = ar.get(timeout=20) log.debug("Got the following granule from the transform: %s", granule_from_transform) # Check that the granule published by the L0 transform has the right properties self._check_granule_from_transform(granule_from_transform) def _check_granule_from_transform(self, granule): """ An internal method to check if a granule has the right properties """ pass
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' unittest # suppress a PyCharm inspector error if all unittest.skip references are commented out self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.IDS = IdentityManagementServiceClient(node=self.container.node) self.PSC = PubsubManagementServiceClient(node=self.container.node) self.DP = DataProductManagementServiceClient(node=self.container.node) self.DAMS = DataAcquisitionManagementServiceClient( node=self.container.node) self.DSC = DatasetManagementServiceClient(node=self.container.node) self.PDC = ProcessDispatcherServiceClient(node=self.container.node) self.OMS = ObservatoryManagementServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) # @unittest.skip('this test just for debugging setup') # def test_just_the_setup(self): # return @attr('EXT') def test_resources_associations_extensions(self): """ create one of each resource and association used by IMS to guard against problems in ion-definitions """ #stuff we control instrument_agent_instance_id, _ = self.RR.create( any_old(RT.InstrumentAgentInstance)) instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) instrument_site_id, _ = self.RR.create(any_old(RT.InstrumentSite)) platform_agent_instance_id, _ = self.RR.create( any_old(RT.PlatformAgentInstance)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) platform_site_id, _ = self.RR.create(any_old(RT.PlatformSite)) platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice)) sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel)) #stuff we associate to data_producer_id, _ = self.RR.create(any_old(RT.DataProducer)) org_id, _ = self.RR.create(any_old(RT.Org)) #instrument_agent_instance_id #is only a target #instrument_agent self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id) self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id) self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device_id) instrument_model_id #is only a target platform_agent_instance_id #is only a target #platform_agent self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id) 
self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id) self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site_id) platform_model_id #is only a target #sensor_device self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id) self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id) sensor_model_id #is only a target #create a parsed product for this instrument output tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', processing_level_code='Parsed_Canonical', temporal_domain=tdom, spatial_domain=sdom) pdict_id = self.DSC.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.PSC.create_stream_definition( name='parsed', parameter_dictionary_id=pdict_id) data_product_id1 = self.DP.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug('new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1) def addInstOwner(inst_id, subject): actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject}) user_id = self.IDS.create_actor_identity(actor_identity_obj) user_info_obj = any_old(RT.UserInfo) user_info_id = self.IDS.create_user_info(user_id, user_info_obj) self.RR.create_association(inst_id, PRED.hasOwner, user_id) #Testing multiple instrument owners addInstOwner( instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254") addInstOwner( instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256") extended_instrument = self.IMS.get_instrument_device_extension( instrument_device_id) self.assertEqual(instrument_device_id, extended_instrument._id) self.assertEqual(len(extended_instrument.owners), 2) self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id) # Lifecycle self.assertEquals(len(extended_instrument.lcstate_transitions), 5) self.assertEquals( set(extended_instrument.lcstate_transitions.keys()), set(['develop', 'deploy', 'retire', 'plan', 'integrate'])) self.assertEquals(len(extended_instrument.availability_transitions), 2) self.assertEquals( set(extended_instrument.availability_transitions.keys()), set(['enable', 'announce'])) # Verify that computed attributes exist for the extended instrument self.assertIsInstance( extended_instrument.computed.last_data_received_datetime, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) self.assertIsInstance( extended_instrument.computed.power_status_roll_up, ComputedIntValue) self.assertIsInstance( extended_instrument.computed.communications_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue) self.assertIsInstance( extended_instrument.computed.location_status_roll_up, ComputedIntValue) log.debug("extended_instrument.computed: %s", extended_instrument.computed) #check model inst_model_obj = self.RR.read(instrument_model_id) self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name) #check agent instance inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id) self.assertEqual(inst_agent_instance_obj.name, 
extended_instrument.agent_instance.name) #check agent inst_agent_obj = self.RR.read(instrument_agent_id) #compound assocs return a list of lists, so check the first element self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent.name) #check platform device plat_device_obj = self.RR.read(platform_device_id) self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name) extended_platform = self.IMS.get_platform_device_extension( platform_device_id) self.assertEqual(1, len(extended_platform.portals)) self.assertEqual(1, len(extended_platform.portal_instruments)) #self.assertEqual(1, len(extended_platform.computed.portal_status.value)) # no agent started so NO statuses reported self.assertEqual(1, len(extended_platform.instrument_devices)) self.assertEqual(instrument_device_id, extended_platform.instrument_devices[0]._id) self.assertEqual(1, len(extended_platform.instrument_models)) self.assertEqual(instrument_model_id, extended_platform.instrument_models[0]._id) self.assertEquals(extended_platform.platform_agent._id, platform_agent_id) self.assertEquals(len(extended_platform.lcstate_transitions), 5) self.assertEquals( set(extended_platform.lcstate_transitions.keys()), set(['develop', 'deploy', 'retire', 'plan', 'integrate'])) self.assertEquals(len(extended_platform.availability_transitions), 2) self.assertEquals( set(extended_platform.availability_transitions.keys()), set(['enable', 'announce'])) #check sensor devices self.assertEqual(1, len(extended_instrument.sensor_devices)) #check data_product_parameters_set self.assertEqual( ComputedValueAvailability.PROVIDED, extended_instrument.computed.data_product_parameters_set.status) self.assertTrue('Parsed_Canonical' in extended_instrument.computed. data_product_parameters_set.value) # the ctd parameters should include 'temp' self.assertTrue('temp' in extended_instrument.computed. 
data_product_parameters_set.value['Parsed_Canonical']) #none of these will work because there is no agent # self.assertEqual(ComputedValueAvailability.NOTAVAILABLE, # extended_instrument.computed.firmware_version.status) # self.assertEqual(ComputedValueAvailability.NOTAVAILABLE, # extended_instrument.computed.operational_state.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.power_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.communications_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.data_status_roll_up.status) # self.assertEqual(DeviceStatusType.STATUS_OK, # extended_instrument.computed.data_status_roll_up.value) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.location_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.recent_events.status) # self.assertEqual([], extended_instrument.computed.recent_events.value) # cleanup c = DotDict() c.resource_registry = self.RR self.RR2.pluck(instrument_agent_id) self.RR2.pluck(instrument_model_id) self.RR2.pluck(instrument_device_id) self.RR2.pluck(platform_agent_id) self.RR2.pluck(sensor_device_id) self.IMS.force_delete_instrument_agent(instrument_agent_id) self.IMS.force_delete_instrument_model(instrument_model_id) self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_platform_agent_instance( platform_agent_instance_id) self.IMS.force_delete_platform_agent(platform_agent_id) self.OMS.force_delete_instrument_site(instrument_site_id) self.OMS.force_delete_platform_site(platform_site_id) self.IMS.force_delete_platform_device(platform_device_id) self.IMS.force_delete_platform_model(platform_model_id) self.IMS.force_delete_sensor_device(sensor_device_id) self.IMS.force_delete_sensor_model(sensor_model_id) #stuff we associate to self.RR.delete(data_producer_id) self.RR.delete(org_id) def test_custom_attributes(self): """ Test assignment of custom attributes """ instModel_obj = IonObject(OT.CustomAttribute, name='SBE37IMModelAttr', description="model custom attr") instrument_model_id, _ = self.RR.create( any_old(RT.InstrumentModel, {"custom_attributes": [instModel_obj]})) instrument_device_id, _ = self.RR.create( any_old( RT.InstrumentDevice, { "custom_attributes": { "favorite_color": "red", "bogus_attr": "should raise warning" } })) self.IMS.assign_instrument_model_to_instrument_device( instrument_model_id, instrument_device_id) # cleanup self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_instrument_model(instrument_model_id) def _get_datastore(self, dataset_id): dataset = self.DSC.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore( datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def test_data_producer(self): idevice_id = self.IMS.create_instrument_device( any_old(RT.InstrumentDevice)) self.assertEqual( 1, len( self.RR2. find_data_producer_ids_of_instrument_device_using_has_data_producer( idevice_id))) pdevice_id = self.IMS.create_platform_device(any_old( RT.PlatformDevice)) self.assertEqual( 1, len( self.RR2. 
find_data_producer_ids_of_platform_device_using_has_data_producer( pdevice_id))) @attr('PREP') def test_prepare_resource_support(self): """ create one of each resource and association used by IMS to guard against problems in ion-definitions """ #stuff we control instrument_agent_instance_id, _ = self.RR.create( any_old(RT.InstrumentAgentInstance)) instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) platform_agent_instance_id, _ = self.RR.create( any_old(RT.PlatformAgentInstance)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice)) sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel)) instrument_device2_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) instrument_device3_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) platform_device2_id, _ = self.RR.create(any_old(RT.PlatformDevice)) sensor_device2_id, _ = self.RR.create(any_old(RT.SensorDevice)) #stuff we associate to data_producer_id, _ = self.RR.create(any_old(RT.DataProducer)) org_id, _ = self.RR.create(any_old(RT.Org)) #instrument_agent_instance_id #is only a target #instrument_agent self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id) self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id) self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device_id) self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device2_id) instrument_model_id #is only a target platform_agent_instance_id #is only a target #platform_agent self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id) self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(platform_device2_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device2_id, PRED.hasDevice, instrument_device2_id) platform_model_id #is only a target #sensor_device self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id) self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(sensor_device2_id, PRED.hasModel, sensor_model_id) self.RR.create_association(sensor_device2_id, PRED.hasDevice, instrument_device2_id) sensor_model_id #is only a target #set lcstate - used for testing prepare - not setting all to DEVELOP, only some self.RR.execute_lifecycle_transition(instrument_agent_id, LCE.DEVELOP) 
self.RR.execute_lifecycle_transition(instrument_device_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(instrument_device2_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_device_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_device2_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_agent_id, LCE.DEVELOP) #create a parsed product for this instrument output tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', processing_level_code='Parsed_Canonical', temporal_domain=tdom, spatial_domain=sdom) pdict_id = self.DSC.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.PSC.create_stream_definition( name='parsed', parameter_dictionary_id=pdict_id) data_product_id1 = self.DP.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug('new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1) def addInstOwner(inst_id, subject): actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject}) user_id = self.IDS.create_actor_identity(actor_identity_obj) user_info_obj = any_old(RT.UserInfo) user_info_id = self.IDS.create_user_info(user_id, user_info_obj) self.RR.create_association(inst_id, PRED.hasOwner, user_id) #Testing multiple instrument owners addInstOwner( instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254") addInstOwner( instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256") def ion_object_encoder(obj): return obj.__dict__ #First call to create instrument_data = self.IMS.prepare_instrument_device_support() #print simplejson.dumps(instrument_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_data._id, '') self.assertEqual(instrument_data.type_, OT.InstrumentDevicePrepareSupport) self.assertEqual( len(instrument_data.associations['InstrumentModel'].resources), 1) self.assertEqual( instrument_data.associations['InstrumentModel'].resources[0]._id, instrument_model_id) self.assertEqual( len(instrument_data.associations['InstrumentAgentInstance']. resources), 1) self.assertEqual( instrument_data.associations['InstrumentAgentInstance']. resources[0]._id, instrument_agent_instance_id) self.assertEqual( len(instrument_data.associations['InstrumentModel']. associated_resources), 0) self.assertEqual( len(instrument_data.associations['InstrumentAgentInstance']. associated_resources), 0) self.assertEqual( len(instrument_data.associations['SensorDevice'].resources), 0) #Next call to update instrument_data = self.IMS.prepare_instrument_device_support( instrument_device_id) #print 'Update results' #print simplejson.dumps(instrument_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_data._id, instrument_device_id) self.assertEqual(instrument_data.type_, OT.InstrumentDevicePrepareSupport) self.assertEqual( len(instrument_data.associations['InstrumentModel'].resources), 1) self.assertEqual( instrument_data.associations['InstrumentModel'].resources[0]._id, instrument_model_id) self.assertEqual( len(instrument_data.associations['InstrumentAgentInstance']. resources), 1) self.assertEqual( instrument_data.associations['InstrumentAgentInstance']. resources[0]._id, instrument_agent_instance_id) self.assertEqual( len(instrument_data.associations['InstrumentModel']. 
associated_resources), 1) self.assertEqual( instrument_data.associations['InstrumentModel']. associated_resources[0].s, instrument_device_id) self.assertEqual( instrument_data.associations['InstrumentModel']. associated_resources[0].o, instrument_model_id) self.assertEqual( len(instrument_data.associations['InstrumentAgentInstance']. associated_resources), 1) self.assertEqual( instrument_data.associations['InstrumentAgentInstance']. associated_resources[0].o, instrument_agent_instance_id) self.assertEqual( instrument_data.associations['InstrumentAgentInstance']. associated_resources[0].s, instrument_device_id) self.assertEqual( len(instrument_data.associations['SensorDevice'].resources), 1) self.assertEqual( instrument_data.associations['SensorDevice'].resources[0]._id, sensor_device_id) self.assertEqual( len(instrument_data.associations['SensorDevice']. associated_resources), 1) self.assertEqual( instrument_data.associations['SensorDevice']. associated_resources[0].o, instrument_device_id) self.assertEqual( instrument_data.associations['SensorDevice']. associated_resources[0].s, sensor_device_id) self.assertEqual( instrument_data.associations['InstrumentModel'].assign_request. request_parameters['instrument_device_id'], instrument_device_id) #test prepare for create of instrument agent instance instrument_agent_data = self.IMS.prepare_instrument_agent_instance_support( ) #print 'Update results' #print simplejson.dumps(instrument_agent_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_agent_data._id, '') self.assertEqual(instrument_agent_data.type_, OT.InstrumentAgentInstancePrepareSupport) self.assertEqual( len(instrument_agent_data.associations['InstrumentDevice']. resources), 2) self.assertEqual( len(instrument_agent_data.associations['InstrumentAgent'].resources ), 1) self.assertEqual( instrument_agent_data.associations['InstrumentAgent'].resources[0]. _id, instrument_agent_id) self.assertEqual( len(instrument_agent_data.associations['InstrumentDevice']. associated_resources), 0) self.assertEqual( len(instrument_agent_data.associations['InstrumentAgent']. associated_resources), 0) #test prepare for update of instrument agent instance to see if it is associated with the instrument that was created instrument_agent_data = self.IMS.prepare_instrument_agent_instance_support( instrument_agent_instance_id=instrument_agent_instance_id) #print 'Update results' #print simplejson.dumps(instrument_agent_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_agent_data._id, instrument_agent_instance_id) self.assertEqual(instrument_agent_data.type_, OT.InstrumentAgentInstancePrepareSupport) self.assertEqual( len(instrument_agent_data.associations['InstrumentDevice']. resources), 3) self.assertEqual( len(instrument_agent_data.associations['InstrumentAgent'].resources ), 1) self.assertEqual( instrument_agent_data.associations['InstrumentAgent'].resources[0]. _id, instrument_agent_id) self.assertEqual( len(instrument_agent_data.associations['InstrumentDevice']. associated_resources), 1) self.assertEqual( instrument_agent_data.associations['InstrumentDevice']. associated_resources[0].s, instrument_device_id) self.assertEqual( instrument_agent_data.associations['InstrumentDevice']. associated_resources[0].o, instrument_agent_instance_id) self.assertEqual( len(instrument_agent_data.associations['InstrumentAgent']. associated_resources), 1) self.assertEqual( instrument_agent_data.associations['InstrumentAgent']. 
associated_resources[0].o, instrument_agent_id) self.assertEqual( instrument_agent_data.associations['InstrumentAgent']. associated_resources[0].s, instrument_agent_instance_id) self.assertEqual( instrument_agent_data.associations['InstrumentAgent']. assign_request.request_parameters['instrument_agent_instance_id'], instrument_agent_instance_id) #test prepare for update of data product to see if it is associated with the instrument that was created data_product_data = self.DP.prepare_data_product_support( data_product_id1) #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2) self.assertEqual(data_product_data._id, data_product_id1) self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport) self.assertEqual( len(data_product_data.associations['StreamDefinition'].resources), 1) self.assertEqual( len(data_product_data.associations['Dataset'].resources), 0) self.assertEqual( len(data_product_data.associations['StreamDefinition']. associated_resources), 1) self.assertEqual( data_product_data.associations['StreamDefinition']. associated_resources[0].s, data_product_id1) self.assertEqual( len(data_product_data.associations['Dataset'].associated_resources ), 0) self.assertEqual( len(data_product_data. associations['InstrumentDeviceHasOutputProduct'].resources), 3) self.assertEqual( len(data_product_data.associations[ 'InstrumentDeviceHasOutputProduct'].associated_resources), 1) self.assertEqual( data_product_data.associations['InstrumentDeviceHasOutputProduct']. associated_resources[0].s, instrument_device_id) self.assertEqual( data_product_data.associations['InstrumentDeviceHasOutputProduct']. associated_resources[0].o, data_product_id1) self.assertEqual( len(data_product_data.associations['PlatformDevice'].resources), 2) platform_data = self.IMS.prepare_platform_device_support() #print simplejson.dumps(platform_data, default=ion_object_encoder, indent=2) self.assertEqual(platform_data._id, '') self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport) self.assertEqual( len(platform_data.associations['PlatformModel'].resources), 1) self.assertEqual( platform_data.associations['PlatformModel'].resources[0]._id, platform_model_id) self.assertEqual( len(platform_data.associations['PlatformAgentInstance'].resources), 1) self.assertEqual( platform_data.associations['PlatformAgentInstance'].resources[0]. _id, platform_agent_instance_id) self.assertEqual( len(platform_data.associations['PlatformModel']. associated_resources), 0) self.assertEqual( len(platform_data.associations['PlatformAgentInstance']. associated_resources), 0) self.assertEqual( len(platform_data.associations['InstrumentDevice'].resources), 1) platform_data = self.IMS.prepare_platform_device_support( platform_device_id) #print simplejson.dumps(platform_data, default=ion_object_encoder, indent=2) self.assertEqual(platform_data._id, platform_device_id) self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport) self.assertEqual( len(platform_data.associations['PlatformModel'].resources), 1) self.assertEqual( platform_data.associations['PlatformModel'].resources[0]._id, platform_model_id) self.assertEqual( len(platform_data.associations['PlatformAgentInstance'].resources), 1) self.assertEqual( platform_data.associations['PlatformAgentInstance'].resources[0]. _id, platform_agent_instance_id) self.assertEqual( len(platform_data.associations['PlatformModel']. associated_resources), 1) self.assertEqual( platform_data.associations['PlatformModel']. 
associated_resources[0].s, platform_device_id) self.assertEqual( platform_data.associations['PlatformModel']. associated_resources[0].o, platform_model_id) self.assertEqual( len(platform_data.associations['PlatformAgentInstance']. associated_resources), 1) self.assertEqual( platform_data.associations['PlatformAgentInstance']. associated_resources[0].o, platform_agent_instance_id) self.assertEqual( platform_data.associations['PlatformAgentInstance']. associated_resources[0].s, platform_device_id) self.assertEqual( len(platform_data.associations['InstrumentDevice'].resources), 2) #self.assertEqual(len(platform_data.associations['InstrumentDevice'].associated_resources), 1) #self.assertEqual(platform_data.associations['InstrumentDevice'].associated_resources[0].s, platform_device_id) #self.assertEqual(platform_data.associations['InstrumentDevice'].associated_resources[0].o, instrument_device_id) self.assertEqual( platform_data.associations['PlatformModel'].assign_request. request_parameters['platform_device_id'], platform_device_id) # cleanup c = DotDict() c.resource_registry = self.RR self.RR2.pluck(instrument_agent_id) self.RR2.pluck(instrument_model_id) self.RR2.pluck(instrument_device_id) self.RR2.pluck(platform_agent_id) self.RR2.pluck(sensor_device_id) self.RR2.pluck(sensor_device2_id) self.IMS.force_delete_instrument_agent(instrument_agent_id) self.IMS.force_delete_instrument_model(instrument_model_id) self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_instrument_device(instrument_device2_id) self.IMS.force_delete_platform_agent_instance( platform_agent_instance_id) self.IMS.force_delete_platform_agent(platform_agent_id) self.IMS.force_delete_platform_device(platform_device_id) self.IMS.force_delete_platform_device(platform_device2_id) self.IMS.force_delete_platform_model(platform_model_id) self.IMS.force_delete_sensor_device(sensor_device_id) self.IMS.force_delete_sensor_device(sensor_device2_id) self.IMS.force_delete_sensor_model(sensor_model_id) #stuff we associate to self.RR.delete(data_producer_id) self.RR.delete(org_id)
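# --- Note: the IMS tests above lean heavily on the any_old() helper to fabricate minimally
# valid resources of a given type. As a rough mental model only (a sketch, not the real
# helper, which also fills in type-specific required fields), it behaves something like:
#
# from uuid import uuid4
#
# def any_old(resource_type, extra_fields=None):
#     # Give the resource a unique name and fold in any caller-supplied fields.
#     fields = {'name': '%s_%s' % (resource_type, uuid4().hex)}
#     fields.update(extra_fields or {})
#     return IonObject(resource_type, fields)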
class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.dataset_management = self.datasetclient def test_createDataProcess(self): #--------------------------------------------------------------------------- # Data Process Definition #--------------------------------------------------------------------------- dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) # Make assertion on the newly registered data process definition data_process_definition = self.rrclient.read(dprocdef_id) self.assertEquals(data_process_definition.name, 'ctd_L0_all') self.assertEquals(data_process_definition.description, 'transform ctd package into three separate L0 streams') self.assertEquals(data_process_definition.module, 'ion.processes.data.transforms.ctd.ctd_L0_all') self.assertEquals(data_process_definition.class_name, 'ctd_L0_all') # Read the data process definition using data process management and make assertions dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id) self.assertEquals(dprocdef_obj.class_name,'ctd_L0_all') self.assertEquals(dprocdef_obj.module,'ion.processes.data.transforms.ctd.ctd_L0_all') #--------------------------------------------------------------------------- # Create an input instrument #--------------------------------------------------------------------------- instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product') instrument_id, rev = self.rrclient.create(instrument_obj) # Register the instrument so that the data producer and stream object are created data_producer_id = self.damsclient.register_instrument(instrument_id) # create a stream definition for the data from the ctd simulator pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id ) # Assert that the link between the stream definition and the data process definition was done assocs = self.rrclient.find_associations(subject=dprocdef_id, predicate=PRED.hasInputStreamDefinition, object=ctd_stream_def_id, id_only=True) self.assertIsNotNone(assocs) #--------------------------------------------------------------------------- # Input Data Product 
#--------------------------------------------------------------------------- tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() input_dp_obj = IonObject( RT.DataProduct, name='InputDataProduct', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=ctd_stream_def_id, exchange_point='test') #Make assertions on the input data product created input_dp_obj = self.rrclient.read(input_dp_id) self.assertEquals(input_dp_obj.name, 'InputDataProduct') self.assertEquals(input_dp_obj.description, 'some new dp') self.damsclient.assign_data_product(instrument_id, input_dp_id) # Retrieve the stream via the DataProduct->Stream associations stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True) self.in_stream_id = stream_ids[0] #--------------------------------------------------------------------------- # Output Data Product #--------------------------------------------------------------------------- outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(name='conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id, binding='conductivity' ) outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(name='pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id, binding='pressure' ) outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(name='temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id, binding='temperature' ) self.output_products={} output_dp_obj = IonObject(RT.DataProduct, name='conductivity', description='transform output conductivity', temporal_domain = tdom, spatial_domain = sdom) output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id) self.output_products['conductivity'] = output_dp_id_1 output_dp_obj = IonObject(RT.DataProduct, name='pressure', description='transform output pressure', temporal_domain = tdom, spatial_domain = sdom) output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id) self.output_products['pressure'] = output_dp_id_2 output_dp_obj = IonObject(RT.DataProduct, name='temperature', description='transform output temperature', temporal_domain = tdom, spatial_domain = sdom) output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id) self.output_products['temperature'] = output_dp_id_3 #--------------------------------------------------------------------------- # Create the data process #--------------------------------------------------------------------------- def _create_data_process(): dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products) return dproc_id dproc_id = _create_data_process() # Make assertions on the data process created data_process = self.dataprocessclient.read_data_process(dproc_id) # Assert that the data process has a process id attached self.assertIsNotNone(data_process.process_id) # Assert that the data process got the input data product's subscription id attached as its own input_subscription_id attribute 
self.assertIsNotNone(data_process.input_subscription_id) output_data_product_ids = self.rrclient.find_objects(subject=dproc_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=True) self.assertEquals(Set(output_data_product_ids[0]), Set([output_dp_id_1,output_dp_id_2,output_dp_id_3])) @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 60}}}) def test_createDataProcessUsingSim(self): #------------------------------- # Create InstrumentModel #------------------------------- instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" ) instModel_id = self.imsclient.create_instrument_model(instModel_obj) #------------------------------- # Create InstrumentAgent #------------------------------- instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver" ) instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) #------------------------------- # Create InstrumentDevice #------------------------------- instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" ) instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id) #------------------------------- # Create InstrumentAgentInstance to hold configuration information #------------------------------- port_agent_config = { 'device_addr': 'sbe37-simulator.oceanobservatories.org', 'device_port': 4001, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'command_port': 4002, 'data_port': 4003, 'log_level': 5, } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", svr_addr="localhost", comms_device_address=CFG.device.sbe37.host, comms_device_port=CFG.device.sbe37.port, port_agent_config = port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id) #------------------------------- # Create CTD Parsed as the first data product #------------------------------- # create a stream definition for the data from the ctd simulator pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id) # Construct temporal and spatial Coordinate Reference System objects tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='ctd_parsed', description='ctd stream test', temporal_domain = tdom, spatial_domain = sdom) ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True) #------------------------------- # Create CTD Raw as the second data product #------------------------------- raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id) dp_obj.name = 'ctd_raw' 
ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True) #------------------------------- # L0 Conductivity - Temperature - Pressure: Data Process Definition #------------------------------- dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) #------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' ) outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' ) outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' ) self.output_products={} ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L0_Conductivity', description='transform output conductivity', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id) self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct, name='L0_Pressure', description='transform output pressure', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id) self.output_products['pressure'] = ctd_l0_pressure_output_dp_id ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct, name='L0_Temperature', description='transform output temperature', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id) self.output_products['temperature'] = ctd_l0_temperature_output_dp_id #------------------------------- # Create listener for data process events and verify that events are received. #------------------------------- # todo: add this validate for Req: L4-CI-SA-RQ-367 Data processing shall notify registered data product consumers about data processing workflow life cycle events #todo (contd) ... I believe the capability does not exist yet now. 
ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu) #------------------------------- # L0 Conductivity - Temperature - Pressure: Create the data process #------------------------------- ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products) #------------------------------- # Retrieve a list of all data process definitions in RR and validate that the DPD is listed #------------------------------- # todo: add this validate for Req: L4-CI-SA-RQ-366 Data processing shall manage data topic definitions # todo: This capability is not yet completed (Swarbhanu) self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id) #todo: check that activate event is received L4-CI-SA-RQ-367 #todo... (it looks like no event is being published when the data process is activated... so below, we just check for now # todo... that the subscription is indeed activated) (Swarbhanu) # todo: monitor process to see if it is active (sa-rq-182) ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id) input_subscription_id = ctd_l0_all_data_process.input_subscription_id subs = self.rrclient.read(input_subscription_id) self.assertTrue(subs.activated) # todo: This has not yet been completed by CEI, will probably surface through a DPMS call self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id) #------------------------------- # Retrieve the extended resources for data process definition and for data process #------------------------------- extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id) self.assertEqual(1, len(extended_process_definition.data_processes)) log.debug("test_createDataProcess: extended_process_definition %s", str(extended_process_definition)) extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id) self.assertEqual(1, len(extended_process.input_data_products)) log.debug("test_createDataProcess: extended_process %s", str(extended_process)) #------------------------------- # Cleanup #------------------------------- self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id) self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id) self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id) self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)
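# --- Hedged sketch: the todos above note that no event is published on data process
# activation yet, so the test falls back to reading the subscription resource. If such an
# event were published, a listener along these lines could verify it. pyon's EventSubscriber
# is real, but the 'DataProcessStatusEvent' type name and its origin are assumptions.
#
# from pyon.event.event import EventSubscriber
# activated = gevent.event.Event()
#
# def on_activate_event(event, *args, **kwargs):
#     activated.set()
#
# es = EventSubscriber(event_type='DataProcessStatusEvent',
#                      origin=ctd_l0_all_data_process_id, callback=on_activate_event)
# es.start()
# self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
# self.assertTrue(activated.wait(10))
# es.stop()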
def _setup_resources(self): # TODO: some or all of this (or some variation) should move to DAMS' # Build the test resources for the dataset dams_cli = DataAcquisitionManagementServiceClient() dpms_cli = DataProductManagementServiceClient() rr_cli = ResourceRegistryServiceClient() pubsub_cli = PubsubManagementServiceClient() eda = ExternalDatasetAgent() eda_id = dams_cli.create_external_dataset_agent(eda) eda_inst = ExternalDatasetAgentInstance() eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id) # Create and register the necessary resources/objects # Create DataProvider dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation()) dprov.contact.name = 'Christopher Mueller' dprov.contact.email = '*****@*****.**' # Create DataSource dsrc = DataSource(protocol_type='DAP', institution=Institution(), contact=ContactInformation()) dsrc.connection_params['base_data_url'] = '' dsrc.contact.name='Tim Giguere' dsrc.contact.email = '*****@*****.**' # Create ExternalDataset ds_name = 'usgs_test_dataset' dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation()) # The usgs.nc test dataset is a download of the R1 dataset found here: # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc' dset.dataset_description.parameters['temporal_dimension'] = 'time' dset.dataset_description.parameters['zonal_dimension'] = 'lon' dset.dataset_description.parameters['meridional_dimension'] = 'lat' dset.dataset_description.parameters['vertical_dimension'] = 'z' dset.dataset_description.parameters['variables'] = [ 'water_temperature', 'streamflow', 'water_temperature_bottom', 'water_temperature_middle', 'specific_conductance', 'data_qualifier', ] # Create DataSourceModel dsrc_model = DataSourceModel(name='dap_model') dsrc_model.model = 'DAP' dsrc_model.data_handler_module = 'N/A' dsrc_model.data_handler_class = 'N/A' ## Run everything through DAMS ds_id = dams_cli.create_external_dataset(external_dataset=dset) ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov) ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc) ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model) # Register the ExternalDataset dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id) # Or using each method dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id) dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id) dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id) dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id) # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id) #create temp streamdef so the data product can create the stream streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp") craft = CoverageCraft sdom, tdom = craft.create_domains() sdom = sdom.dump() tdom = tdom.dump() parameter_dictionary = craft.create_parameters() parameter_dictionary = parameter_dictionary.dump() dprod = IonObject(RT.DataProduct, name='usgs_parsed_product', description='parsed 
usgs product', temporal_domain = tdom, spatial_domain = sdom) # Generate the data product and associate it to the ExternalDataset dproduct_id = dpms_cli.create_data_product(data_product=dprod, stream_definition_id=streamdef_id, parameter_dictionary=parameter_dictionary) dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id) stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True) stream_id = stream_id[0] log.info('Created resources: {0}'.format({'ExternalDataset':ds_id, 'ExternalDataProvider':ext_dprov_id, 'DataSource':ext_dsrc_id, 'DataSourceModel':ext_dsrc_model_id, 'DataProducer':dproducer_id, 'DataProduct':dproduct_id, 'Stream':stream_id})) #CBM: Use CF standard_names # ttool = TaxyTool() # ttool.add_taxonomy_set('time','time') # ttool.add_taxonomy_set('lon','longitude') # ttool.add_taxonomy_set('lat','latitude') # ttool.add_taxonomy_set('z','water depth') # ttool.add_taxonomy_set('water_temperature', 'average water temperature') # ttool.add_taxonomy_set('water_temperature_bottom','water temperature at bottom of water column') # ttool.add_taxonomy_set('water_temperature_middle', 'water temperature at middle of water column') # ttool.add_taxonomy_set('streamflow', 'flow velocity of stream') # ttool.add_taxonomy_set('specific_conductance', 'specific conductance of water') # ttool.add_taxonomy_set('data_qualifier','data qualifier flag') # # ttool.add_taxonomy_set('coords','This group contains coordinate parameters') # ttool.add_taxonomy_set('data','This group contains data parameters') # Create the logger for receiving publications self.create_stream_and_logger(name='usgs',stream_id=stream_id) pdict = ParameterDictionary() t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64'))) t_ctxt.reference_frame = AxisTypeEnum.TIME t_ctxt.uom = 'seconds since 01-01-1970' pdict.add_context(t_ctxt) lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) lat_ctxt.reference_frame = AxisTypeEnum.LAT lat_ctxt.uom = 'degree_north' pdict.add_context(lat_ctxt) lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) lon_ctxt.reference_frame = AxisTypeEnum.LON lon_ctxt.uom = 'degree_east' pdict.add_context(lon_ctxt) temp_ctxt = ParameterContext('water_temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) temp_ctxt.uom = 'degree_Celsius' pdict.add_context(temp_ctxt) temp_ctxt = ParameterContext('water_temperature_bottom', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) temp_ctxt.uom = 'degree_Celsius' pdict.add_context(temp_ctxt) temp_ctxt = ParameterContext('water_temperature_middle', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) temp_ctxt.uom = 'degree_Celsius' pdict.add_context(temp_ctxt) temp_ctxt = ParameterContext('z', param_type=QuantityType(value_encoding = numpy.dtype('float32'))) temp_ctxt.uom = 'meters' pdict.add_context(temp_ctxt) cond_ctxt = ParameterContext('streamflow', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) cond_ctxt.uom = 'unknown' pdict.add_context(cond_ctxt) pres_ctxt = ParameterContext('specific_conductance', param_type=QuantityType(value_encoding=numpy.dtype('float32'))) pres_ctxt.uom = 'unknown' pdict.add_context(pres_ctxt) pres_ctxt = ParameterContext('data_qualifier', param_type=QuantityType(value_encoding=numpy.dtype('bool'))) pres_ctxt.uom = 'unknown' pdict.add_context(pres_ctxt) 
self.EDA_RESOURCE_ID = ds_id self.EDA_NAME = ds_name self.DVR_CONFIG['dh_cfg'] = { 'TESTING':True, 'stream_id':stream_id, #'taxonomy':ttool.dump(), 'param_dictionary':pdict.dump(), 'data_producer_id':dproducer_id,#CBM: Should this be put in the main body of the config - with mod & cls? 'max_records':4, }
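# --- Note: DVR_CONFIG['dh_cfg'] above hands the external dataset agent's data handler
# everything it needs to publish test granules: the stream to publish on, the dumped
# parameter dictionary, and a record cap. On the handler side, rebuilding the dictionary
# would look roughly like the sketch below (load() is the counterpart of the dump() used
# above; treating the rebuilt pdict as directly usable by RecordDictionaryTool is an
# assumption about this handler's internals).
#
# pdict = ParameterDictionary.load(self.DVR_CONFIG['dh_cfg']['param_dictionary'])
# rdt = RecordDictionaryTool(param_dictionary=pdict)
# rdt['time'] = numpy.arange(4)  # bounded by 'max_records': 4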
class TestDataProductProvenance(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.dpmsclient = DataProductManagementServiceClient(node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient(node=self.container.node) self.process_dispatcher = ProcessDispatcherServiceClient() self.dataset_management = DatasetManagementServiceClient() # deactivate all data processes when tests are complete def killAllDataProcesses(): for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]: self.dataprocessclient.deactivate_data_process(proc_id) self.dataprocessclient.delete_data_process(proc_id) self.addCleanup(killAllDataProcesses) def test_get_data_product_provenance_report(self): #Create a test device device_obj = Device(name='Device1', description='test instrument site') device_id, _ = self.rrclient.create(device_obj) self.addCleanup(self.rrclient.delete, device_id) #Create a test DataProduct data_product1_obj = DataProduct(name='DataProduct1', description='test data product 1') data_product1_id, _ = self.rrclient.create(data_product1_obj) self.addCleanup(self.rrclient.delete, data_product1_id) #Create a test DataProcess data_process_obj = DataProcess(name='DataProcess', description='test data process') data_process_id, _ = self.rrclient.create(data_process_obj) self.addCleanup(self.rrclient.delete, data_process_id) #Create a second test DataProduct data_product2_obj = DataProduct(name='DataProduct2', description='test data product 2') data_product2_id, _ = self.rrclient.create(data_product2_obj) self.addCleanup(self.rrclient.delete, data_product2_id) #Create a test DataProducer data_producer_obj = DataProducer(name='DataProducer', description='test data producer') data_producer_id, rev = self.rrclient.create(data_producer_obj) #Link the DataProcess to the second DataProduct manually assoc_id, _ = self.rrclient.create_association(subject=data_process_id, predicate=PRED.hasInputProduct, object=data_product2_id) self.addCleanup(self.rrclient.delete_association, assoc_id) # Register the instrument and process. 
This links the device and the data process with their own producers self.damsclient.register_instrument(device_id) self.addCleanup(self.damsclient.unregister_instrument, device_id) self.damsclient.register_process(data_process_id) self.addCleanup(self.damsclient.unregister_process, data_process_id) #Manually link the first DataProduct with the test DataProducer assoc_id, _ = self.rrclient.create_association(subject=data_product1_id, predicate=PRED.hasDataProducer, object=data_producer_id) #Get the DataProducer linked to the DataProcess (created in register_process above) #Associate that with DataProduct1's DataProducer data_process_producer_ids, _ = self.rrclient.find_objects(subject=data_process_id, predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True) assoc_id, _ = self.rrclient.create_association(subject=data_process_producer_ids[0], predicate=PRED.hasParent, object=data_producer_id) self.addCleanup(self.rrclient.delete_association, assoc_id) #Get the DataProducer linked to the Device (created in register_instrument above) #Associate that with the DataProcess's DataProducer device_producer_ids, _ = self.rrclient.find_objects(subject=device_id, predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True) assoc_id, _ = self.rrclient.create_association(subject=data_producer_id, predicate=PRED.hasParent, object=device_producer_ids[0]) #Create the links between the Device, DataProducts, DataProcess, and all DataProducers self.damsclient.assign_data_product(input_resource_id=device_id, data_product_id=data_product1_id) self.addCleanup(self.damsclient.unassign_data_product, device_id, data_product1_id) self.damsclient.assign_data_product(input_resource_id=data_process_id, data_product_id=data_product2_id) self.addCleanup(self.damsclient.unassign_data_product, data_process_id, data_product2_id) #Traverse through the relationships to get the links between objects res = self.dpmsclient.get_data_product_provenance_report(data_product2_id) #Make sure there are four keys self.assertEqual(len(res.keys()), 4) parent_count = 0 config_count = 0 for v in res.itervalues(): if 'parent' in v: parent_count += 1 if 'config' in v: config_count += 1 #Make sure there are three parents and four configs self.assertEqual(parent_count, 3) self.assertEqual(config_count, 4) @unittest.skip('This test is obsolete with new framework') def test_get_provenance(self): #create a deployment with metadata and an initial site and device instrument_site_obj = IonObject(RT.InstrumentSite, name='InstrumentSite1', description='test instrument site') instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, "") log.debug( 'test_get_provenance: new instrument_site_id = %s ', str(instrument_site_id)) # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" ) try: instModel_id = self.imsclient.create_instrument_model(instModel_obj) except BadRequest as ex: self.fail("failed to create new InstrumentModel: %s" %ex) log.debug( 'test_get_provenance: new InstrumentModel id = %s ', str(instModel_id)) self.omsclient.assign_instrument_model_to_instrument_site(instModel_id, instrument_site_id) # Create InstrumentAgent parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict' ) instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations = [parsed_config] ) try: instAgent_id =
self.imsclient.create_instrument_agent(instAgent_obj) except BadRequest as ex: self.fail("failed to create new InstrumentAgent: %s" %ex) log.debug( 'test_get_provenance:new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # Create InstrumentDevice log.debug('test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ') instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" ) try: instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id) except BadRequest as ex: self.fail("failed to create new InstrumentDevice: %s" %ex) log.debug("test_get_provenance: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) #------------------------------- # Create CTD Parsed data product #------------------------------- tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id) log.debug( 'test_get_provenance:Creating new CDM data product with a stream definition') dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain = tdom, spatial_domain = sdom) ctd_parsed_data_product = self.dpmsclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug( 'new dp_id = %s', ctd_parsed_data_product) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product) self.dpmsclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product) #------------------------------- # create a data product for the site to pass the OMS check.... 
we need to remove this check #------------------------------- dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) log_data_product_id = self.dpmsclient.create_data_product(dp_obj, parsed_stream_def_id) #------------------------------- # Deploy instrument device to instrument site #------------------------------- deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment') deployment_id = self.omsclient.create_deployment(deployment_obj) self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id) self.imsclient.deploy_instrument_device(instDevice_id, deployment_id) log.debug("test_get_provenance: created deployment id: %s ", str(deployment_id) ) self.omsclient.activate_deployment(deployment_id) inst_device_objs, _ = self.rrclient.find_objects(subject=instrument_site_id, predicate=PRED.hasDevice, object_type=RT.InstrumentDevice, id_only=False) log.debug("test_get_provenance: deployed device: %s ", str(inst_device_objs[0]) ) #------------------------------- # Create the agent instance #------------------------------- port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config = port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition ctd_L0_all") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') try: ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new ctd_L0_all data process definition: %s" %ex) #------------------------------- # L1 Conductivity: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1ConductivityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_conductivity', description='create the L1 conductivity data product', module='ion.processes.data.transforms.ctd.ctd_L1_conductivity', class_name='CTDL1ConductivityTransform') try: ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" %ex) #------------------------------- # L1 Pressure: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1PressureTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_pressure', description='create the L1 pressure data product', module='ion.processes.data.transforms.ctd.ctd_L1_pressure', class_name='CTDL1PressureTransform') try: ctd_L1_pressure_dprocdef_id =
self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new CTDL1PressureTransform data process definition: %s" %ex) #------------------------------- # L1 Temperature: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1TemperatureTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_temperature', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L1_temperature', class_name='CTDL1TemperatureTransform') try: ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" %ex) #------------------------------- # L2 Salinity: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition SalinityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L2_salinity', description='create the L2 salinity data product', module='ion.processes.data.transforms.ctd.ctd_L2_salinity', class_name='SalinityTransform') try: ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new SalinityTransform data process definition: %s" %ex) #------------------------------- # L2 Density: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition DensityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L2_density', description='create the L2 density data product', module='ion.processes.data.transforms.ctd.ctd_L2_density', class_name='DensityTransform') try: ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new DensityTransform data process definition: %s" %ex) #------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' ) outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' ) outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' ) log.debug("TestDataProductProvenance: create output data product L0 conductivity") ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L0_Conductivity', description='transform output conductivity', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id) log.debug("TestDataProductProvenance: create output data product L0 pressure")
ctd_l0_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L0_Pressure', description='transform output pressure', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id) log.debug("TestDataProductProvenance: create output data product L0 temperature") ctd_l0_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L0_Temperature', description='transform output temperature', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id) #------------------------------- # L1 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(name='L1_conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id, binding='conductivity' ) outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(name='L1_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id, binding='pressure' ) outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(name='L1_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id, binding='temperature' ) log.debug("TestDataProductProvenance: create output data product L1 conductivity") ctd_l1_conductivity_output_dp_obj = IonObject(RT.DataProduct, name='L1_Conductivity', description='transform output L1 conductivity', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_conductivity_output_dp_obj, outgoing_stream_l1_conductivity_id) log.debug("TestDataProductProvenance: create output data product L1 pressure") ctd_l1_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L1_Pressure', description='transform output L1 pressure', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_pressure_output_dp_obj, outgoing_stream_l1_pressure_id) log.debug("TestDataProductProvenance: create output data product L1 temperature") ctd_l1_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L1_Temperature', description='transform output L1 temperature', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_temperature_output_dp_obj, outgoing_stream_l1_temperature_id) #------------------------------- # L2 Salinity - Density: Output Data Products #------------------------------- outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(name='L2_salinity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id, binding='salinity' ) outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(name='L2_Density', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, 
ctd_L2_density_dprocdef_id, binding='density' ) log.debug("TestDataProductProvenance: create output data product L2 Salinity") ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct, name='L2_Salinity', description='transform output L2 salinity', temporal_domain = tdom, spatial_domain = sdom) ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id) log.debug("TestDataProductProvenance: create output data product L2 Density") # ctd_l2_density_output_dp_obj = IonObject( RT.DataProduct, # name='L2_Density', # description='transform output pressure', # temporal_domain = tdom, # spatial_domain = sdom) # # ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj, # outgoing_stream_l2_density_id, # parameter_dictionary) contactInfo = ContactInformation() contactInfo.individual_names_given = "Bill" contactInfo.individual_name_family = "Smith" contactInfo.street_address = "111 First St" contactInfo.city = "San Diego" contactInfo.email = "*****@*****.**" contactInfo.phones = ["858-555-6666"] contactInfo.country = "USA" contactInfo.postal_code = "92123" ctd_l2_density_output_dp_obj = IonObject( RT.DataProduct, name='L2_Density', description='transform output L2 density', contacts = [contactInfo], iso_topic_category = "my_iso_topic_category_here", quality_control_level = "1", temporal_domain = tdom, spatial_domain = sdom) ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L0 all data_process start") try: input_data_products = [ctd_parsed_data_product] output_data_products = [ctd_l0_conductivity_output_dp_id, ctd_l0_pressure_output_dp_id, ctd_l0_temperature_output_dp_id] ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L0_all_dprocdef_id, in_data_product_ids = input_data_products, out_data_product_ids = output_data_products ) #activate only this data process just for coverage self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) contents = "this is the lookup table contents, replace with a file..."
att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII) deviceAttachment = self.rrclient.create_attachment(ctd_l0_all_data_process_id, att) log.info( 'TestDataProductProvenance: data process attachment id = %s', deviceAttachment) log.debug("TestDataProductProvenance: create L0 all data_process return") #------------------------------- # L1 Conductivity: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1 Conductivity data_process start") try: l1_conductivity_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L1_conductivity_dprocdef_id, in_data_product_ids = [ctd_l0_conductivity_output_dp_id], out_data_product_ids = [ctd_l1_conductivity_output_dp_id]) self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L1 Pressure: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1_Pressure data_process start") try: l1_pressure_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L1_pressure_dprocdef_id, in_data_product_ids = [ctd_l0_pressure_output_dp_id], out_data_product_ids = [ctd_l1_pressure_output_dp_id]) self.dataprocessclient.activate_data_process(l1_pressure_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L1 Temperature: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1_Temperature data_process start") try: l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L1_temperature_dprocdef_id, in_data_product_ids = [ctd_l0_temperature_output_dp_id], out_data_product_ids = [ctd_l1_temperature_output_dp_id]) self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L2 Salinity: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L2_salinity data_process start") try: l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L2_salinity_dprocdef_id, in_data_product_ids = [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id], out_data_product_ids = [ctd_l2_salinity_output_dp_id]) self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L2 Density: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L2_Density data_process start") try: in_dp_ids = [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id] out_dp_ids = [ctd_l2_density_output_dp_id] l2_density_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L2_density_dprocdef_id, in_data_product_ids = in_dp_ids, out_data_product_ids = out_dp_ids) self.dataprocessclient.activate_data_process(l2_density_all_data_process_id) except BadRequest as ex:
self.fail("failed to create new data process: %s" %ex) #------------------------------- # Launch InstrumentAgentInstance, connect to the resource agent client #------------------------------- self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id) print 'TestDataProductProvenance: Instrument agent instance obj: = ', inst_agent_instance_obj # Start a resource agent client to talk with the instrument agent. # self._ia_client = ResourceAgentClient('iaclient', name=ResourceAgentClient._get_agent_process_id(instDevice_id, process=FakeProcess()) # print 'activate_instrument: got ia client %s', self._ia_client # log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client)) #------------------------------- # Deactivate InstrumentAgentInstance #------------------------------- self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) self.dataprocessclient.deactivate_data_process(l2_density_all_data_process_id) self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id) self.dataprocessclient.deactivate_data_process(l1_temperature_all_data_process_id) self.dataprocessclient.deactivate_data_process(l1_pressure_data_process_id) self.dataprocessclient.deactivate_data_process(l1_conductivity_data_process_id) self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id) #------------------------------- # Retrieve the provenance info for the ctd density data product #------------------------------- provenance_dict = self.dpmsclient.get_data_product_provenance(ctd_l2_density_output_dp_id) log.debug("TestDataProductProvenance: provenance_dict %s", str(provenance_dict)) #validate that products are represented self.assertTrue (provenance_dict[str(ctd_l1_conductivity_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l0_conductivity_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l2_density_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l1_temperature_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l0_temperature_output_dp_id)]) density_dict = (provenance_dict[str(ctd_l2_density_output_dp_id)]) self.assertEquals(density_dict['producer'], [l2_density_all_data_process_id]) #------------------------------- # Retrieve the extended resource for this data product #------------------------------- extended_product = self.dpmsclient.get_data_product_extension(ctd_l2_density_output_dp_id) self.assertEqual(1, len(extended_product.data_processes) ) self.assertEqual(3, len(extended_product.process_input_data_products) ) # log.debug("TestDataProductProvenance: DataProduct provenance_product_list %s", str(extended_product.provenance_product_list)) # log.debug("TestDataProductProvenance: DataProduct data_processes %s", str(extended_product.data_processes)) # log.debug("TestDataProductProvenance: DataProduct process_input_data_products %s", str(extended_product.process_input_data_products)) # log.debug("TestDataProductProvenance: provenance %s", str(extended_product.computed.provenance.value)) #------------------------------- # Retrieve the extended resource for this data process #------------------------------- extended_process_def = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id) # log.debug("TestDataProductProvenance: DataProcess extended_process_def %s", str(extended_process_def)) # 
log.debug("TestDataProductProvenance: DataProcess data_processes %s", str(extended_process_def.data_processes)) # log.debug("TestDataProductProvenance: DataProcess data_products %s", str(extended_process_def.data_products)) self.assertEqual(1, len(extended_process_def.data_processes) ) self.assertEqual(3, len(extended_process_def.output_stream_definitions) ) self.assertEqual(3, len(extended_process_def.data_products) ) #one list because of one data process #------------------------------- # Request the xml report #------------------------------- results = self.dpmsclient.get_data_product_provenance_report(ctd_l2_density_output_dp_id) print results #------------------------------- # Cleanup #------------------------------- self.dpmsclient.delete_data_product(ctd_parsed_data_product) self.dpmsclient.delete_data_product(log_data_product_id) self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id) self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id) self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)
class EventManagementIntTest(IonIntegrationTestCase): def setUp(self): super(EventManagementIntTest, self).setUp() self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.event_management = EventManagementServiceClient() self.rrc = ResourceRegistryServiceClient() self.process_dispatcher = ProcessDispatcherServiceClient() self.pubsub = PubsubManagementServiceClient() self.dataset_management = DatasetManagementServiceClient() self.data_product_management = DataProductManagementServiceClient() self.queue_cleanup = [] self.exchange_cleanup = [] def tearDown(self): for queue in self.queue_cleanup: xn = self.container.ex_manager.create_xn_queue(queue) xn.delete() for exchange in self.exchange_cleanup: xp = self.container.ex_manager.create_xp(exchange) xp.delete() def test_create_read_update_delete_event_type(self): """ Test that the CRUD method for event types work correctly """ event_type = EventType(name="an event type") event_type.origin = 'instrument_1' # create event_type_id = self.event_management.create_event_type(event_type) self.assertIsNotNone(event_type_id) # read read_event_type = self.event_management.read_event_type(event_type_id) self.assertEquals(read_event_type.name, event_type.name) self.assertEquals(read_event_type.origin, event_type.origin) #update read_event_type.origin = 'instrument_2' read_event_type.producer = 'producer' self.event_management.update_event_type(read_event_type) updated_event_type = self.event_management.read_event_type(event_type_id) self.assertEquals(updated_event_type.origin, 'instrument_2') self.assertEquals(updated_event_type.producer, 'producer') # delete self.event_management.delete_event_type(event_type_id) with self.assertRaises(NotFound): self.event_management.read_event_type(event_type_id) def test_create_read_update_delete_event_process_definition(self): """ Test that the CRUD methods for the event process definitions work correctly """ # Create module = 'ion.processes.data.transforms.event_alert_transform' class_name = 'EventAlertTransform' procdef_id = self.event_management.create_event_process_definition(version='ver_1', module=module, class_name=class_name, uri='http://hare.com', arguments=['arg1', 'arg2'], event_types=['ExampleDetectableEvent', 'type_2'], sub_types=['sub_type_1']) # Read read_process_def = self.event_management.read_event_process_definition(procdef_id) self.assertEquals(read_process_def.executable['module'], module) self.assertEquals(read_process_def.executable['class'], class_name) # Update self.event_management.update_event_process_definition(event_process_definition_id=procdef_id, class_name='StreamAlertTransform', arguments=['arg3', 'arg4'], event_types=['event_type_new']) updated_event_process_def = self.event_management.read_event_process_definition(procdef_id) self.assertEquals(updated_event_process_def.executable['class'], 'StreamAlertTransform') self.assertEquals(updated_event_process_def.arguments, ['arg3', 'arg4']) definition = updated_event_process_def.definition self.assertEquals(updated_event_process_def.definition.event_types, ['event_type_new']) # Delete self.event_management.delete_event_process_definition(procdef_id) with self.assertRaises(NotFound): self.event_management.read_event_process_definition(procdef_id) def test_event_in_stream_out_transform(self): """ Test the event-in/stream-out transform """ stream_id, _ = self.pubsub.create_stream('test_stream', exchange_point='science_data') self.exchange_cleanup.append('science_data') 
#--------------------------------------------------------------------------------------------- # Launch a ctd transform #--------------------------------------------------------------------------------------------- # Create the process definition process_definition = ProcessDefinition( name='EventToStreamTransform', description='For testing an event-in/stream-out transform') process_definition.executable['module']= 'ion.processes.data.transforms.event_in_stream_out_transform' process_definition.executable['class'] = 'EventToStreamTransform' proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition) # Build the config config = DotDict() config.process.queue_name = 'test_queue' config.process.exchange_point = 'science_data' config.process.publish_streams.output = stream_id config.process.event_type = 'ExampleDetectableEvent' config.process.variables = ['voltage', 'temperature' ] # Schedule the process pid = self.process_dispatcher.schedule_process(process_definition_id=proc_def_id, configuration=config) self.addCleanup(self.process_dispatcher.cancel_process,pid) #--------------------------------------------------------------------------------------------- # Create a subscriber for testing #--------------------------------------------------------------------------------------------- ar_cond = gevent.event.AsyncResult() def subscriber_callback(m, r, s): ar_cond.set(m) sub = StandaloneStreamSubscriber('sub', subscriber_callback) self.addCleanup(sub.stop) sub_id = self.pubsub.create_subscription('subscription_cond', stream_ids=[stream_id], exchange_name='sub') self.pubsub.activate_subscription(sub_id) self.queue_cleanup.append(sub.xn.queue) sub.start() gevent.sleep(4) #--------------------------------------------------------------------------------------------- # Publish an event. 
The transform has been configured to receive this event #--------------------------------------------------------------------------------------------- event_publisher = EventPublisher("ExampleDetectableEvent") event_publisher.publish_event(origin = 'fake_origin', voltage = '5', temperature = '273') # Assert that the transform processed the event and published data on the output stream result_cond = ar_cond.get(timeout=10) self.assertTrue(result_cond) def test_create_read_delete_event_process(self): """ Test that the CRUD methods for the event processes work correctly """ #--------------------------------------------------------------------------------------------- # Create a process definition #--------------------------------------------------------------------------------------------- # Create module = 'ion.processes.data.transforms.event_alert_transform' class_name = 'EventAlertTransform' procdef_id = self.event_management.create_event_process_definition(version='ver_1', module=module, class_name=class_name, uri='http://hare.com', arguments=['arg1', 'arg2'], event_types=['ExampleDetectableEvent', 'type_2'], sub_types=['sub_type_1']) # Read read_process_def = self.event_management.read_event_process_definition(procdef_id) self.assertEquals(read_process_def.arguments, ['arg1', 'arg2']) #--------------------------------------------------------------------------------------------- # Use the process definition to create a process #--------------------------------------------------------------------------------------------- # Create a stream param_dict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True) stream_def_id = self.pubsub.create_stream_definition('cond_stream_def', parameter_dictionary_id=param_dict_id) tdom, sdom = time_series_domain() tdom, sdom = tdom.dump(), sdom.dump() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) # Create a data product data_product_id = self.data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id) output_products = {} output_products['conductivity'] = data_product_id # Create an event process event_process_id = self.event_management.create_event_process( process_definition_id=procdef_id, event_types=['ExampleDetectableEvent','DetectionEvent'], sub_types=['s1', 's2'], origins=['or_1', 'or_2'], origin_types=['or_t1', 'or_t2'], out_data_products = output_products) self.addCleanup(self.process_dispatcher.cancel_process, event_process_id) #--------------------------------------------------------------------------------------------- # Read the event process object and make assertions #--------------------------------------------------------------------------------------------- event_process_obj = self.event_management.read_event_process(event_process_id=event_process_id) # Get the stream associated with the data product for the sake of making assertions stream_ids, _ = self.rrc.find_objects(data_product_id, PRED.hasStream, id_only=True) stream_id = stream_ids[0] # Assertions!
self.assertEquals(event_process_obj.detail.output_streams['conductivity'], stream_id) self.assertEquals(event_process_obj.detail.event_types, ['ExampleDetectableEvent', 'DetectionEvent']) self.assertEquals(event_process_obj.detail.sub_types, ['s1', 's2']) self.assertEquals(event_process_obj.detail.origins, ['or_1', 'or_2']) self.assertEquals(event_process_obj.detail.origin_types, ['or_t1', 'or_t2'])
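# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original tests): several tests in this
# file bridge an asynchronous subscriber callback and the synchronous test
# body with a gevent AsyncResult, as in test_event_in_stream_out_transform
# above. The minimal pattern, assuming only gevent, looks like this:
# ---------------------------------------------------------------------------
import gevent.event

ar = gevent.event.AsyncResult()

def on_message(message, stream_route, stream_id):
    # Record the first message only; the test body blocks on ar.get() below.
    if not ar.ready():
        ar.set(message)

# ... start a subscriber with on_message as its callback, publish, then:
#   received = ar.get(timeout=10)   # raises gevent.Timeout if nothing arrives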
class TestBulkIngest(IonIntegrationTestCase): EDA_MOD = 'ion.agents.data.external_dataset_agent' EDA_CLS = 'ExternalDatasetAgent' def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataAcquisitionManagementService self.client = DataAcquisitionManagementServiceClient(node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsub_client = PubsubManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.data_retriever = DataRetrieverServiceClient(node=self.container.node) self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name) # Data async and subscription TODO: Replace with new subscriber self._finished_count = None #TODO: Switch to gevent.queue.Queue self._async_finished_result = AsyncResult() self._finished_events_received = [] self._finished_event_subscriber = None self._start_finished_event_subscriber() self.addCleanup(self._stop_finished_event_subscriber) self.DVR_CONFIG = {} self.DVR_CONFIG = { 'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler', 'dvr_cls' : 'SlocumDataHandler', } self._setup_resources() self.agent_config = { 'driver_config' : self.DVR_CONFIG, 'stream_config' : {}, 'agent' : {'resource_id': self.EDA_RESOURCE_ID}, 'test_mode' : True } datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance, name='ExternalDatasetAgentInstance1', description='external data agent instance', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS, dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=self.agent_config ) self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(external_dataset_agent_instance=datasetagent_instance_obj, external_dataset_agent_id=self.datasetagent_id, external_dataset_id=self.EDA_RESOURCE_ID) #TG: Setup/configure the granule logger to log granules as they're published pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id) dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id) print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj # Start a resource agent client to talk with the instrument agent. 
self._ia_client = ResourceAgentClient('datasetagentclient', name=pid, process=FakeProcess()) log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client)) def create_logger(self, name, stream_id=''): # logger process producer_definition = ProcessDefinition(name=name+'_logger') producer_definition.executable = { 'module':'ion.processes.data.stream_granule_logger', 'class':'StreamGranuleLogger' } logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition) configuration = { 'process':{ 'stream_id':stream_id, } } pid = self.processdispatchclient.schedule_process(process_definition_id= logger_procdef_id, configuration=configuration) return pid def _start_finished_event_subscriber(self): def consume_event(*args,**kwargs): log.debug('EventSubscriber event received: %s', str(args[0]) ) if args[0].description == 'TestingFinished': log.debug('TestingFinished event received') self._finished_events_received.append(args[0]) if self._finished_count and self._finished_count == len(self._finished_events_received): log.debug('Finishing test...') self._async_finished_result.set(len(self._finished_events_received)) log.debug('Called self._async_finished_result.set({0})'.format(len(self._finished_events_received))) self._finished_event_subscriber = EventSubscriber(event_type='DeviceEvent', callback=consume_event) self._finished_event_subscriber.start() def _stop_finished_event_subscriber(self): if self._finished_event_subscriber: self._finished_event_subscriber.stop() self._finished_event_subscriber = None def tearDown(self): pass @unittest.skip('Update to agent refactor.') def test_slocum_data_ingest(self): HIST_CONSTRAINTS_1 = {} # Test instrument driver execute interface to start and stop streaming mode. 
cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.UNINITIALIZED) cmd = AgentCommand(command='initialize') retval = self._ia_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.INACTIVE) cmd = AgentCommand(command='go_active') retval = self._ia_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.IDLE) cmd = AgentCommand(command='run') retval = self._ia_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.OBSERVATORY) # Make sure the polling interval is appropriate for a test params = { 'POLLING_INTERVAL': 3 } self._ia_client.set_param(params) self._finished_count = 1 cmd = AgentCommand(command='acquire_data') self._ia_client.execute(cmd) # Assert that data was received self._async_finished_result.get(timeout=15) self.assertTrue(len(self._finished_events_received) >= 1) cmd = AgentCommand(command='reset') retval = self._ia_client.execute_agent(cmd) cmd = AgentCommand(command='get_current_state') retval = self._ia_client.execute_agent(cmd) state = retval.result self.assertEqual(state, InstrumentAgentState.UNINITIALIZED) #todo enable after Luke's mor to retrieve, right now must have the Time axis called 'time' # replay_granule = self.data_retriever.retrieve_last_data_points(self.dataset_id, 10) # rdt = RecordDictionaryTool.load_from_granule(replay_granule) # # comp = rdt['date_pattern'] == numpy.arange(10) + 10 # # log.debug("TestBulkIngest: comp: %s", comp) # # self.assertTrue(comp.all()) for pid in self.loggerpids: self.processdispatchclient.cancel_process(pid) def _setup_resources(self): self.loggerpids = [] # Create DataProvider dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation()) dprov.contact.name = 'Christopher Mueller' dprov.contact.email = '*****@*****.**' # Create DataSetModel dataset_model = ExternalDatasetModel(name='slocum_model') dataset_model.datset_type = 'SLOCUM' dataset_model_id = self.dams_client.create_external_dataset_model(dataset_model) # Create ExternalDataset ds_name = 'slocum_test_dataset' dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation()) dset.dataset_description.parameters['base_url'] = 'test_data/slocum/' dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat' dset.dataset_description.parameters['date_pattern'] = '%Y %j' dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat' dset.dataset_description.parameters['temporal_dimension'] = None dset.dataset_description.parameters['zonal_dimension'] = None dset.dataset_description.parameters['meridional_dimension'] = None dset.dataset_description.parameters['vertical_dimension'] = None dset.dataset_description.parameters['variables'] = [ 'c_wpt_y_lmc', 'sci_water_cond', 'm_y_lmc', 'u_hd_fin_ap_inflection_holdoff', 'sci_m_present_time', 'm_leakdetect_voltage_forward', 'sci_bb3slo_b660_scaled', 'c_science_send_all', 'm_gps_status', 'm_water_vx', 'm_water_vy', 'c_heading', 'sci_fl3slo_chlor_units', 'u_hd_fin_ap_gain', 
'm_vacuum', 'u_min_water_depth', 'm_gps_lat', 'm_veh_temp', 'f_fin_offset', 'u_hd_fin_ap_hardover_holdoff', 'c_alt_time', 'm_present_time', 'm_heading', 'sci_bb3slo_b532_scaled', 'sci_fl3slo_cdom_units', 'm_fin', 'x_cycle_overrun_in_ms', 'sci_water_pressure', 'u_hd_fin_ap_igain', 'sci_fl3slo_phyco_units', 'm_battpos', 'sci_bb3slo_b470_scaled', 'm_lat', 'm_gps_lon', 'sci_ctd41cp_timestamp', 'm_pressure', 'c_wpt_x_lmc', 'c_ballast_pumped', 'x_lmc_xy_source', 'm_lon', 'm_avg_speed', 'sci_water_temp', 'u_pitch_ap_gain', 'm_roll', 'm_tot_num_inflections', 'm_x_lmc', 'u_pitch_ap_deadband', 'm_final_water_vy', 'm_final_water_vx', 'm_water_depth', 'm_leakdetect_voltage', 'u_pitch_max_delta_battpos', 'm_coulomb_amphr', 'm_pitch', ] ## Create the external dataset ds_id = self.dams_client.create_external_dataset(external_dataset=dset, external_dataset_model_id=dataset_model_id) ext_dprov_id = self.dams_client.create_external_data_provider(external_data_provider=dprov) # Register the ExternalDataset dproducer_id = self.dams_client.register_external_data_set(external_dataset_id=ds_id) ## Create the dataset agent datasetagent_obj = IonObject(RT.ExternalDatasetAgent, name='ExternalDatasetAgent1', description='external data agent', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS ) self.datasetagent_id = self.dams_client.create_external_dataset_agent(external_dataset_agent=datasetagent_obj, external_dataset_model_id=dataset_model_id) # Generate the data product and associate it to the ExternalDataset pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict') streamdef_id = self.pubsub_client.create_stream_definition(name="temp", parameter_dictionary_id=pdict.identifier) tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dprod = IonObject(RT.DataProduct, name='slocum_parsed_product', description='parsed slocum product', temporal_domain = tdom, spatial_domain = sdom) self.dproduct_id = self.dataproductclient.create_data_product(data_product=dprod, stream_definition_id=streamdef_id) self.dams_client.assign_data_product(input_resource_id=ds_id, data_product_id=self.dproduct_id) #save the incoming slocum data self.dataproductclient.activate_data_product_persistence(self.dproduct_id) stream_ids, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True) stream_id = stream_ids[0] dataset_id, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasDataset, object_type=RT.Dataset, id_only=True) self.dataset_id = dataset_id[0] pid = self.create_logger('slocum_parsed_product', stream_id ) self.loggerpids.append(pid) self.DVR_CONFIG['dh_cfg'] = { 'TESTING':True, 'stream_id':stream_id, 'param_dictionary':pdict.dump(), 'data_producer_id':dproducer_id, #CBM: Should this be put in the main body of the config - with mod & cls? 'max_records':20, } # Create the logger for receiving publications #self.create_stream_and_logger(name='slocum',stream_id=stream_id) # Create agent config. self.EDA_RESOURCE_ID = ds_id self.EDA_NAME = ds_name
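# ---------------------------------------------------------------------------
# Illustrative sketch (values are placeholders, EXAMPLE_DVR_CONFIG is a
# hypothetical name): the driver config assembled in _setup_resources above
# takes this shape when handed to the external dataset agent; stream_id,
# pdict and dproducer_id come from the setup code.
# ---------------------------------------------------------------------------
EXAMPLE_DVR_CONFIG = {
    'dvr_mod': 'ion.agents.data.handlers.slocum_data_handler',  # handler module
    'dvr_cls': 'SlocumDataHandler',                             # handler class
    'dh_cfg': {                            # handler-specific settings
        'TESTING': True,                   # enable test shortcuts in the handler
        'stream_id': '<stream_id>',        # stream the handler publishes granules to
        'param_dictionary': {},            # pdict.dump() in the real setup
        'data_producer_id': '<dproducer_id>',  # provenance tag for published granules
        'max_records': 20,                 # granule batching size used by the handler
    },
}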
class TestOmsLaunch(IonIntegrationTestCase): def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.platformModel_id = None # rsn_oms: to retrieve network structure and information from RSN-OMS: # Note that OmsClientFactory will create an "embedded" RSN OMS # simulator object by default. self.rsn_oms = OmsClientFactory.create_instance() self.all_platforms = {} self.topology = {} self.agent_device_map = {} self.agent_streamconfig_map = {} self._async_data_result = AsyncResult() self._data_subscribers = [] self._samples_received = [] self.addCleanup(self._stop_data_subscribers) self._async_event_result = AsyncResult() self._event_subscribers = [] self._events_received = [] self.addCleanup(self._stop_event_subscribers) self._start_event_subscriber() self._set_up_DataProduct_obj() self._set_up_PlatformModel_obj() def _set_up_DataProduct_obj(self): # Create data product object to be used for each of the platform log streams tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() pdict_id = self.dataset_management.read_parameter_dictionary_by_name('platform_eng_parsed', id_only=True) self.platform_eng_stream_def_id = self.pubsubcli.create_stream_definition( name='platform_eng', parameter_dictionary_id=pdict_id) self.dp_obj = IonObject(RT.DataProduct, name='platform_eng data', description='platform_eng test', temporal_domain = tdom, spatial_domain = sdom) def _set_up_PlatformModel_obj(self): # Create PlatformModel platformModel_obj = IonObject(RT.PlatformModel, name='RSNPlatformModel', description="RSNPlatformModel") try: self.platformModel_id = self.imsclient.create_platform_model(platformModel_obj) except BadRequest as ex: self.fail("failed to create new PlatformModel: %s" %ex) log.debug( 'new PlatformModel id = %s', self.platformModel_id) def _traverse(self, platform_id, parent_platform_objs=None): """ Recursive routine that repeatedly calls _prepare_platform to build the object dictionary for each platform. @param platform_id ID of the platform to be visited @param parent_platform_objs dict of objects associated to parent platform, if any. @retval the dict returned by _prepare_platform at this level. """ log.info("Starting _traverse for %r", platform_id) plat_objs = self._prepare_platform(platform_id, parent_platform_objs) self.all_platforms[platform_id] = plat_objs # now, traverse the children: retval = self.rsn_oms.getSubplatformIDs(platform_id) subplatform_ids = retval[platform_id] for subplatform_id in subplatform_ids: self._traverse(subplatform_id, plat_objs) # note, topology indexed by platform_id self.topology[platform_id] = plat_objs['children'] return plat_objs def _prepare_platform(self, platform_id, parent_platform_objs): """ This routine generalizes the manual construction currently done in test_oms_launch.py.
It is called by the recursive _traverse method so all platforms starting from a given base platform are prepared. Note: For simplicity in this test, sites are organized in the same hierarchical way as the platforms themselves. @param platform_id ID of the platform to be visited @param parent_platform_objs dict of objects associated to parent platform, if any. @retval a dict of associated objects similar to those in test_oms_launch """ site__obj = IonObject(RT.PlatformSite, name='%s_PlatformSite' % platform_id, description='%s_PlatformSite platform site' % platform_id) site_id = self.omsclient.create_platform_site(site__obj) if parent_platform_objs: # establish hasSite association with the parent self.rrclient.create_association( subject=parent_platform_objs['site_id'], predicate=PRED.hasSite, object=site_id) # prepare platform attributes and ports: monitor_attributes = self._prepare_platform_attributes(platform_id) ports = self._prepare_platform_ports(platform_id) device__obj = IonObject(RT.PlatformDevice, name='%s_PlatformDevice' % platform_id, description='%s_PlatformDevice platform device' % platform_id, ports=ports, platform_monitor_attributes = monitor_attributes) device_id = self.imsclient.create_platform_device(device__obj) self.imsclient.assign_platform_model_to_platform_device(self.platformModel_id, device_id) self.rrclient.create_association(subject=site_id, predicate=PRED.hasDevice, object=device_id) self.damsclient.register_instrument(instrument_id=device_id) if parent_platform_objs: # establish hasDevice association with the parent self.rrclient.create_association( subject=parent_platform_objs['device_id'], predicate=PRED.hasDevice, object=device_id) agent__obj = IonObject(RT.PlatformAgent, name='%s_PlatformAgent' % platform_id, description='%s_PlatformAgent platform agent' % platform_id) agent_id = self.imsclient.create_platform_agent(agent__obj) if parent_platform_objs: # add this platform_id to parent's children: parent_platform_objs['children'].append(platform_id) self.imsclient.assign_platform_model_to_platform_agent(self.platformModel_id, agent_id) # agent_instance_obj = IonObject(RT.PlatformAgentInstance, # name='%s_PlatformAgentInstance' % platform_id, # description="%s_PlatformAgentInstance" % platform_id) # # agent_instance_id = self.imsclient.create_platform_agent_instance( # agent_instance_obj, agent_id, device_id) plat_objs = { 'platform_id': platform_id, 'site__obj': site__obj, 'site_id': site_id, 'device__obj': device__obj, 'device_id': device_id, 'agent__obj': agent__obj, 'agent_id': agent_id, # 'agent_instance_obj': agent_instance_obj, # 'agent_instance_id': agent_instance_id, 'children': [] } log.info("plat_objs for platform_id %r = %s", platform_id, str(plat_objs)) self.agent_device_map[platform_id] = device__obj stream_config = self._create_stream_config(plat_objs) self.agent_streamconfig_map[platform_id] = stream_config # self._start_data_subscriber(agent_instance_id, stream_config) return plat_objs def _prepare_platform_attributes(self, platform_id): """ Returns the list of PlatformMonitorAttributes objects corresponding to the attributes associated to the given platform. 
""" result = self.rsn_oms.getPlatformAttributes(platform_id) self.assertTrue(platform_id in result) ret_infos = result[platform_id] monitor_attributes = [] for attrName, attrDfn in ret_infos.iteritems(): log.debug("platform_id=%r: preparing attribute=%r", platform_id, attrName) monitor_rate = attrDfn['monitorCycleSeconds'] units = attrDfn['units'] plat_attr_obj = IonObject(OT.PlatformMonitorAttributes, id=attrName, monitor_rate=monitor_rate, units=units) monitor_attributes.append(plat_attr_obj) return monitor_attributes def _prepare_platform_ports(self, platform_id): """ Returns the list of PlatformPort objects corresponding to the ports associated to the given platform. """ result = self.rsn_oms.getPlatformPorts(platform_id) self.assertTrue(platform_id in result) port_dict = result[platform_id] ports = [] for port_id, port in port_dict.iteritems(): log.debug("platform_id=%r: preparing port=%r", platform_id, port_id) ip_address = port['comms']['ip'] plat_port_obj = IonObject(OT.PlatformPort, port_id=port_id, ip_address=ip_address) ports.append(plat_port_obj) return ports def _create_stream_config(self, plat_objs): platform_id = plat_objs['platform_id'] device_id = plat_objs['device_id'] #create the log data product self.dp_obj.name = '%s platform_eng data' % platform_id data_product_id = self.dpclient.create_data_product(data_product=self.dp_obj, stream_definition_id=self.platform_eng_stream_def_id) self.damsclient.assign_data_product(input_resource_id=device_id, data_product_id=data_product_id) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(data_product_id, PRED.hasStream, None, True) stream_config = self._build_stream_config(stream_ids[0]) return stream_config def _build_stream_config(self, stream_id=''): platform_eng_dictionary = DatasetManagementService.get_parameter_dictionary_by_name('platform_eng_parsed') #get the streamroute object from pubsub by passing the stream_id stream_def_ids, _ = self.rrclient.find_objects(stream_id, PRED.hasStreamDefinition, RT.StreamDefinition, True) stream_route = self.pubsubcli.read_stream_route(stream_id=stream_id) stream_config = {'routing_key' : stream_route.routing_key, 'stream_id' : stream_id, 'stream_definition_ref' : stream_def_ids[0], 'exchange_point' : stream_route.exchange_point, 'parameter_dictionary':platform_eng_dictionary.dump()} return stream_config def _set_platform_agent_instances(self): """ Once most of the objs/defs associated with all platforms are in place, this method creates and associates the PlatformAgentInstance elements. 
""" self.platform_configs = {} for platform_id, plat_objs in self.all_platforms.iteritems(): PLATFORM_CONFIG = self.platform_configs[platform_id] = { 'platform_id': platform_id, 'platform_topology': self.topology, # 'agent_device_map': self.agent_device_map, 'agent_streamconfig_map': self.agent_streamconfig_map, 'driver_config': DVR_CONFIG, } agent_config = { 'platform_config': PLATFORM_CONFIG, } agent_instance_obj = IonObject(RT.PlatformAgentInstance, name='%s_PlatformAgentInstance' % platform_id, description="%s_PlatformAgentInstance" % platform_id, agent_config=agent_config) agent_id = plat_objs['agent_id'] device_id = plat_objs['device_id'] agent_instance_id = self.imsclient.create_platform_agent_instance( agent_instance_obj, agent_id, device_id) plat_objs['agent_instance_obj'] = agent_instance_obj plat_objs['agent_instance_id'] = agent_instance_id stream_config = self.agent_streamconfig_map[platform_id] self._start_data_subscriber(agent_instance_id, stream_config) def _start_data_subscriber(self, stream_name, stream_config): """ Starts data subscriber for the given stream_name and stream_config """ def consume_data(message, stream_route, stream_id): # A callback for processing subscribed-to data. log.info('Subscriber received data message: %s.', str(message)) self._samples_received.append(message) self._async_data_result.set() log.info('_start_data_subscriber stream_name=%r', stream_name) stream_id = stream_config['stream_id'] # Create subscription for the stream exchange_name = '%s_queue' % stream_name self.container.ex_manager.create_xn_queue(exchange_name).purge() sub = StandaloneStreamSubscriber(exchange_name, consume_data) sub.start() self._data_subscribers.append(sub) sub_id = self.pubsubcli.create_subscription(name=exchange_name, stream_ids=[stream_id]) self.pubsubcli.activate_subscription(sub_id) sub.subscription_id = sub_id def _stop_data_subscribers(self): """ Stop the data subscribers on cleanup. """ try: for sub in self._data_subscribers: if hasattr(sub, 'subscription_id'): try: self.pubsubcli.deactivate_subscription(sub.subscription_id) except: pass self.pubsubcli.delete_subscription(sub.subscription_id) sub.stop() finally: self._data_subscribers = [] def _start_event_subscriber(self, event_type="PlatformAlarmEvent", sub_type="power"): """ Starts event subscriber for events of given event_type ("PlatformAlarmEvent" by default) and given sub_type ("power" by default). """ # TODO note: ion-definitions still using 'PlatformAlarmEvent' but we # should probably define 'PlatformExternalEvent' or something like that. def consume_event(evt, *args, **kwargs): # A callback for consuming events. log.info('Event subscriber received evt: %s.', str(evt)) self._events_received.append(evt) self._async_event_result.set(evt) sub = EventSubscriber(event_type=event_type, sub_type=sub_type, callback=consume_event) sub.start() log.info("registered event subscriber for event_type=%r, sub_type=%r", event_type, sub_type) self._event_subscribers.append(sub) sub._ready_event.wait(timeout=EVENT_TIMEOUT) def _stop_event_subscribers(self): """ Stops the event subscribers on cleanup. 
""" try: for sub in self._event_subscribers: if hasattr(sub, 'subscription_id'): try: self.pubsubcli.deactivate_subscription(sub.subscription_id) except: pass self.pubsubcli.delete_subscription(sub.subscription_id) sub.stop() finally: self._event_subscribers = [] def test_oms_create_and_launch(self): # pick a base platform: base_platform_id = BASE_PLATFORM_ID # and trigger the traversal of the branch rooted at that base platform # to create corresponding ION objects and configuration dictionaries: base_platform_objs = self._traverse(base_platform_id) # now that most of the topology information is there, add the # PlatformAgentInstance elements self._set_platform_agent_instances() base_platform_config = self.platform_configs[base_platform_id] log.info("base_platform_id = %r", base_platform_id) log.info("topology = %s", str(self.topology)) #------------------------------- # Launch Base Platform AgentInstance, connect to the resource agent client #------------------------------- agent_instance_id = base_platform_objs['agent_instance_id'] pid = self.imsclient.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id) log.debug("start_platform_agent_instance returned pid=%s", pid) #wait for start instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id) gate = ProcessStateGate(self.processdispatchclient.read_process, instance_obj.agent_process_id, ProcessStateEnum.RUNNING) self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds") agent_instance_obj= self.imsclient.read_instrument_agent_instance(agent_instance_id) log.debug('test_oms_create_and_launch: Platform agent instance obj: %s', str(agent_instance_obj)) # Start a resource agent client to talk with the instrument agent. self._pa_client = ResourceAgentClient('paclient', name=agent_instance_obj.agent_process_id, process=FakeProcess()) log.debug(" test_oms_create_and_launch:: got pa client %s", str(self._pa_client)) log.debug("base_platform_config =\n%s", base_platform_config) # ping_agent can be issued before INITIALIZE retval = self._pa_client.ping_agent(timeout=TIMEOUT) log.debug( 'Base Platform ping_agent = %s', str(retval) ) # issue INITIALIZE command to the base platform, which will launch the # creation of the whole platform hierarchy rooted at base_platform_config['platform_id'] # cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE, kwargs=dict(plat_config=base_platform_config)) cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug( 'Base Platform INITIALIZE = %s', str(retval) ) # GO_ACTIVE cmd = AgentCommand(command=PlatformAgentEvent.GO_ACTIVE) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug( 'Base Platform GO_ACTIVE = %s', str(retval) ) # RUN: this command includes the launch of the resource monitoring greenlets cmd = AgentCommand(command=PlatformAgentEvent.RUN) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug( 'Base Platform RUN = %s', str(retval) ) # START_EVENT_DISPATCH kwargs = dict(params="TODO set params") cmd = AgentCommand(command=PlatformAgentEvent.START_EVENT_DISPATCH, kwargs=kwargs) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) self.assertTrue(retval.result is not None) # wait for data sample # just wait for at least one -- see consume_data above log.info("waiting for reception of a data sample...") self._async_data_result.get(timeout=DATA_TIMEOUT) self.assertTrue(len(self._samples_received) >= 1) log.info("waiting 
a bit more for reception of more data samples...") sleep(10) log.info("Got data samples: %d", len(self._samples_received)) # wait for event # just wait for at least one event -- see consume_event above log.info("waiting for reception of an event...") self._async_event_result.get(timeout=EVENT_TIMEOUT) log.info("Received events: %s", len(self._events_received)) # STOP_EVENT_DISPATCH cmd = AgentCommand(command=PlatformAgentEvent.STOP_EVENT_DISPATCH) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) self.assertTrue(retval.result is not None) # GO_INACTIVE cmd = AgentCommand(command=PlatformAgentEvent.GO_INACTIVE) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug( 'Base Platform GO_INACTIVE = %s', str(retval) ) # RESET: Resets the base platform agent, which includes termination of # its sub-platforms processes: cmd = AgentCommand(command=PlatformAgentEvent.RESET) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug( 'Base Platform RESET = %s', str(retval) ) #------------------------------- # Stop Base Platform AgentInstance #------------------------------- self.imsclient.stop_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
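    # --- Illustrative sketch (not part of the original test) ----------------
    # test_oms_create_and_launch repeats the same execute-and-log step for
    # each FSM event. A hypothetical helper (the name is illustrative, not
    # part of the original suite) could compress that pattern:
    #
    #   def _execute_platform_event(self, event):
    #       cmd = AgentCommand(command=event)
    #       retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
    #       log.debug('Base Platform %s = %s', event, str(retval))
    #       return retval
    # -------------------------------------------------------------------------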
class TestCTDTransformsIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        print 'started services'

        # Now create clients to the services used by this test
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)

    def create_logger(self, name, stream_id=''):
        # logger process: subscribes to the given stream and logs each granule
        producer_definition = ProcessDefinition(name=name + '_logger')
        producer_definition.executable = {
            'module': 'ion.processes.data.stream_granule_logger',
            'class': 'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(
            process_definition=producer_definition)
        configuration = {
            'process': {
                'stream_id': stream_id,
            }
        }
        pid = self.processdispatchclient.schedule_process(
            process_definition_id=logger_procdef_id,
            configuration=configuration)

        return pid
    def test_createTransformsThenActivateInstrument(self):
        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel",
                                  model="SBE37IMModel")
        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" % ex)
        log.debug('new InstrumentModel id = %s', instModel_id)

        # Create InstrumentAgent
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_module="ion.agents.instrument.instrument_agent",
                                  driver_class="InstrumentAgent")
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" % ex)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_createTransformsThenActivateInstrument: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241)')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)
        log.debug("test_createTransformsThenActivateInstrument: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241)", instDevice_id)

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          driver_module='mi.instrument.seabird.sbe37smb.ooicore.driver',
                                          driver_class='SBE37Driver',
                                          comms_device_address='sbe37-simulator.oceanobservatories.org',
                                          comms_device_port=4001,
                                          port_agent_work_dir='/tmp/',
                                          port_agent_delimeter=['<<', '>>'])
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj,
                                                                               instAgent_id,
                                                                               instDevice_id)

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=ctd_stream_def)
        log.debug('new Stream Definition id = %s', ctd_stream_def_id)

        log.debug('Creating new CDM data product with a stream definition')

        # NOTE: other tests in this file build equivalent domains via the
        # coverage model's time_series_domain() helper; CoverageCraft is an
        # older path to the same temporal/spatial domain dumps.
        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj,
                                                                             ctd_stream_def_id,
                                                                             parameter_dictionary)
        log.debug('new dp_id = %s', ctd_parsed_data_product)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=ctd_parsed_data_product)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        pid = self.create_logger('ctd_parsed', stream_ids[0])
        self.loggerpids.append(pid)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        log.debug('Creating new RAW data product with a stream definition')
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubclient.create_stream_definition(container=raw_stream_def)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj,
                                                                          raw_stream_def_id,
                                                                          parameter_dictionary)
        log.debug('new dp_id = %s', str(ctd_raw_data_product))

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=ctd_raw_data_product)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)
        log.debug('Data product streams2 = %s', str(stream_ids))

        # todo: attaching the taxonomy to the stream is a TEMPORARY measure
        # Create taxonomies for the raw stream and attach them to the stream
        RawTax = TaxyTool()
        RawTax.add_taxonomy_set('raw_fixed', 'Fixed length bytes in an array of records')
        RawTax.add_taxonomy_set('raw_blob', 'Unlimited length bytes in an array')
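        # --- Illustrative overview (not part of the original test) ----------
        # The remainder of this test wires up the following transform
        # topology, then streams live SBE37 data through it:
        #
        #   parsed CTD product --> ctd_L0_all --> L0 conductivity / pressure / temperature
        #   L0 conductivity    --> CTDL1ConductivityTransform --> L1 conductivity
        #   L0 pressure        --> CTDL1PressureTransform     --> L1 pressure
        #   L0 temperature     --> CTDL1TemperatureTransform  --> L1 temperature
        #   parsed CTD product --> SalinityTransform          --> L2 salinity
        #   parsed CTD product --> DensityTransform           --> L2 density
        # ----------------------------------------------------------------------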
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all',
                            process_source='some_source_reference')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" % ex)

        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create data process definition CTDL1ConductivityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_conductivity',
                            description='create the L1 conductivity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
                            class_name='CTDL1ConductivityTransform',
                            process_source='CTDL1ConductivityTransform source code here...')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" % ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create data process definition CTDL1PressureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_pressure',
                            description='create the L1 pressure data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
                            class_name='CTDL1PressureTransform',
                            process_source='CTDL1PressureTransform source code here...')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1PressureTransform data process definition: %s" % ex)

        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create data process definition CTDL1TemperatureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_temperature',
                            description='create the L1 temperature data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
                            class_name='CTDL1TemperatureTransform',
                            process_source='CTDL1TemperatureTransform source code here...')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" % ex)

        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create data process definition SalinityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_salinity',
                            description='create the L2 salinity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
                            class_name='SalinityTransform',
                            process_source='SalinityTransform source code here...')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new SalinityTransform data process definition: %s" % ex)

        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create data process definition DensityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_density',
                            description='create the L2 density data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_density',
                            class_name='DensityTransform',
                            process_source='DensityTransform source code here...')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new DensityTransform data process definition: %s" % ex)
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------
        outgoing_stream_l0_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_conductivity, name='L0_Conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id)

        outgoing_stream_l0_pressure = L0_pressure_stream_definition()
        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_pressure, name='L0_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id)

        outgoing_stream_l0_temperature = L0_temperature_stream_definition()
        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_temperature, name='L0_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id)

        self.output_products = {}

        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 conductivity")
        ctd_l0_conductivity_output_dp_obj = IonObject(RT.DataProduct,
                                                      name='L0_Conductivity',
                                                      description='transform output conductivity',
                                                      temporal_domain=tdom,
                                                      spatial_domain=sdom)
        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_conductivity_output_dp_obj,
            outgoing_stream_l0_conductivity_id,
            parameter_dictionary)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 pressure")
        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
                                                  name='L0_Pressure',
                                                  description='transform output pressure',
                                                  temporal_domain=tdom,
                                                  spatial_domain=sdom)
        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_pressure_output_dp_obj,
            outgoing_stream_l0_pressure_id,
            parameter_dictionary)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 temperature")
        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
                                                     name='L0_Temperature',
                                                     description='transform output temperature',
                                                     temporal_domain=tdom,
                                                     spatial_domain=sdom)
        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_temperature_output_dp_obj,
            outgoing_stream_l0_temperature_id,
            parameter_dictionary)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id)
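        # NOTE (descriptive, not in the original): the keys of
        # self.output_products ('conductivity', 'pressure', 'temperature')
        # are how the create_data_process call further below routes each of
        # the three L0 output streams of ctd_L0_all to its data product.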
        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------
        outgoing_stream_l1_conductivity = L1_conductivity_stream_definition()
        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_conductivity, name='L1_conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id)

        outgoing_stream_l1_pressure = L1_pressure_stream_definition()
        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_pressure, name='L1_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id)

        outgoing_stream_l1_temperature = L1_temperature_stream_definition()
        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_temperature, name='L1_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L1 conductivity")
        ctd_l1_conductivity_output_dp_obj = IonObject(RT.DataProduct,
                                                      name='L1_Conductivity',
                                                      description='transform output L1 conductivity',
                                                      temporal_domain=tdom,
                                                      spatial_domain=sdom)
        ctd_l1_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_conductivity_output_dp_obj,
            outgoing_stream_l1_conductivity_id,
            parameter_dictionary)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l1_conductivity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the data product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l1_conductivity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_conductivity', stream_ids[0])
        self.loggerpids.append(pid)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L1 pressure")
        ctd_l1_pressure_output_dp_obj = IonObject(RT.DataProduct,
                                                  name='L1_Pressure',
                                                  description='transform output L1 pressure',
                                                  temporal_domain=tdom,
                                                  spatial_domain=sdom)
        ctd_l1_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_pressure_output_dp_obj,
            outgoing_stream_l1_pressure_id,
            parameter_dictionary)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l1_pressure_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the data product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l1_pressure_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_pressure', stream_ids[0])
        self.loggerpids.append(pid)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L1 temperature")
        ctd_l1_temperature_output_dp_obj = IonObject(RT.DataProduct,
                                                     name='L1_Temperature',
                                                     description='transform output L1 temperature',
                                                     temporal_domain=tdom,
                                                     spatial_domain=sdom)
        ctd_l1_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_temperature_output_dp_obj,
            outgoing_stream_l1_temperature_id,
            parameter_dictionary)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l1_temperature_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the data product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l1_temperature_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_temperature', stream_ids[0])
        self.loggerpids.append(pid)
        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------
        outgoing_stream_l2_salinity = L2_practical_salinity_stream_definition()
        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l2_salinity, name='L2_salinity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id)

        outgoing_stream_l2_density = L2_density_stream_definition()
        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l2_density, name='L2_Density')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L2 Salinity")
        ctd_l2_salinity_output_dp_obj = IonObject(RT.DataProduct,
                                                  name='L2_Salinity',
                                                  description='transform output L2 salinity',
                                                  temporal_domain=tdom,
                                                  spatial_domain=sdom)
        ctd_l2_salinity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l2_salinity_output_dp_obj,
            outgoing_stream_l2_salinity_id,
            parameter_dictionary)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l2_salinity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the data product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l2_salinity', stream_ids[0])
        self.loggerpids.append(pid)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L2 Density")
        ctd_l2_density_output_dp_obj = IonObject(RT.DataProduct,
                                                 name='L2_Density',
                                                 description='transform output L2 density',
                                                 temporal_domain=tdom,
                                                 spatial_domain=sdom)
        ctd_l2_density_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l2_density_output_dp_obj,
            outgoing_stream_l2_density_id,
            parameter_dictionary)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l2_density_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the data product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l2_density_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l2_density', stream_ids[0])
        self.loggerpids.append(pid)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create L0 all data_process start")
        try:
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
            self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)
        log.debug("test_createTransformsThenActivateInstrument: create L0 all data_process return")

        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create L1 Conductivity data_process start")
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L1_conductivity_dprocdef_id,
                [ctd_l0_conductivity_output_dp_id],
                {'output': ctd_l1_conductivity_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)
        log.debug("test_createTransformsThenActivateInstrument: create L1 Conductivity data_process return")
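        # --- Illustrative sketch (not part of the original test) ------------
        # Each remaining transform hookup below follows the same create-then-
        # activate shape; a hypothetical helper (illustrative name, not part
        # of the original suite) would look like:
        #
        #   def _start_data_process(self, dprocdef_id, in_products, out_map):
        #       dproc_id = self.dataprocessclient.create_data_process(
        #           dprocdef_id, in_products, out_map)
        #       self.dataprocessclient.activate_data_process(dproc_id)
        #       return dproc_id
        # ----------------------------------------------------------------------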
return") #------------------------------- # L1 Pressure: Create the data process #------------------------------- log.debug("test_createTransformsThenActivateInstrument: create L1_Pressure data_process start") try: l1_pressure_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_pressure_dprocdef_id, [ctd_l0_pressure_output_dp_id], {'output':ctd_l1_pressure_output_dp_id}) self.dataprocessclient.activate_data_process(l1_pressure_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) log.debug("test_createTransformsThenActivateInstrument: create L1_Pressure data_process return") #------------------------------- # L1 Temperature: Create the data process #------------------------------- log.debug("test_createTransformsThenActivateInstrument: create L1_Pressure data_process start") try: l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_temperature_dprocdef_id, [ctd_l0_temperature_output_dp_id], {'output':ctd_l1_temperature_output_dp_id}) self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) log.debug("test_createTransformsThenActivateInstrument: create L1_Pressure data_process return") #------------------------------- # L2 Salinity: Create the data process #------------------------------- log.debug("test_createTransformsThenActivateInstrument: create L2_salinity data_process start") try: l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_salinity_dprocdef_id, [ctd_parsed_data_product], {'output':ctd_l2_salinity_output_dp_id}) self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) log.debug("test_createTransformsThenActivateInstrument: create L2_salinity data_process return") #------------------------------- # L2 Density: Create the data process #------------------------------- log.debug("test_createTransformsThenActivateInstrument: create L2_Density data_process start") try: l2_density_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_density_dprocdef_id, [ctd_parsed_data_product], {'output':ctd_l2_density_output_dp_id}) self.dataprocessclient.activate_data_process(l2_density_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) log.debug("test_createTransformsThenActivateInstrument: create L2_Density data_process return") #------------------------------- # Launch InstrumentAgentInstance, connect to the resource agent client #------------------------------- self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id) print 'test_createTransformsThenActivateInstrument: Instrument agent instance obj: = ', inst_agent_instance_obj # Start a resource agent client to talk with the instrument agent. 
        self._ia_client = ResourceAgentClient('iaclient',
                                              name=inst_agent_instance_obj.agent_process_id,
                                              process=FakeProcess())
        log.debug("test_createTransformsThenActivateInstrument: got ia client %s", str(self._ia_client))

        #-------------------------------
        # Streaming
        #-------------------------------
        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_createTransformsThenActivateInstrument: initialize %s", str(retval))

        log.debug("test_createTransformsThenActivateInstrument: Sending go_active command (L4-CI-SA-RQ-334)")
        cmd = AgentCommand(command='go_active')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_createTransformsThenActivateInstrument: return value from go_active %s", str(reply))

        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_createTransformsThenActivateInstrument: current state after sending go_active command %s (L4-CI-SA-RQ-334)", str(state))

        cmd = AgentCommand(command='run')
        reply = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_createTransformsThenActivateInstrument: return from run state: %s", str(state))

        # Make sure the sampling rate and transmission are sane.
        params = {
            SBE37Parameter.NAVG: 1,
            SBE37Parameter.INTERVAL: 5,
            SBE37Parameter.TXREALTIME: True
        }
        self._ia_client.set_param(params)

        log.debug("test_createTransformsThenActivateInstrument: calling go_streaming")
        cmd = AgentCommand(command='go_streaming')
        reply = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_createTransformsThenActivateInstrument: return from go_streaming state: %s", str(state))

        # let the instrument stream for a few seconds
        time.sleep(7)

        log.debug("test_createTransformsThenActivateInstrument: calling go_observatory")
        cmd = AgentCommand(command='go_observatory')
        reply = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_createTransformsThenActivateInstrument: return from go_observatory state %s", str(state))

        log.debug("test_createTransformsThenActivateInstrument: calling reset")
        cmd = AgentCommand(command='reset')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_createTransformsThenActivateInstrument: return from reset state: %s", str(reply.result))
        time.sleep(2)

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)
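    # --- Illustrative summary (not part of the original test) ---------------
    # The agent command sequence exercised above walks the instrument agent
    # FSM:
    #   initialize -> go_active -> run -> (set params) -> go_streaming
    #     -> [stream for ~7 s] -> go_observatory -> reset
    # with get_current_state checks between the transitions.
    # -------------------------------------------------------------------------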
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(handler_module=self.DVR_CONFIG['dvr_mod'],
                                   handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(
            eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'slocum_test_dataset'
        dset = ExternalDataset(name=ds_name,
                               dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(),
                               contact=ContactInformation())

        dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
        dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
        dset.dataset_description.parameters['date_pattern'] = '%Y %j'
        dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
            'c_wpt_y_lmc', 'sci_water_cond', 'm_y_lmc', 'u_hd_fin_ap_inflection_holdoff',
            'sci_m_present_time', 'm_leakdetect_voltage_forward', 'sci_bb3slo_b660_scaled',
            'c_science_send_all', 'm_gps_status', 'm_water_vx', 'm_water_vy', 'c_heading',
            'sci_fl3slo_chlor_units', 'u_hd_fin_ap_gain', 'm_vacuum', 'u_min_water_depth',
            'm_gps_lat', 'm_veh_temp', 'f_fin_offset', 'u_hd_fin_ap_hardover_holdoff',
            'c_alt_time', 'm_present_time', 'm_heading', 'sci_bb3slo_b532_scaled',
            'sci_fl3slo_cdom_units', 'm_fin', 'x_cycle_overrun_in_ms', 'sci_water_pressure',
            'u_hd_fin_ap_igain', 'sci_fl3slo_phyco_units', 'm_battpos', 'sci_bb3slo_b470_scaled',
            'm_lat', 'm_gps_lon', 'sci_ctd41cp_timestamp', 'm_pressure', 'c_wpt_x_lmc',
            'c_ballast_pumped', 'x_lmc_xy_source', 'm_lon', 'm_avg_speed', 'sci_water_temp',
            'u_pitch_ap_gain', 'm_roll', 'm_tot_num_inflections', 'm_x_lmc',
            'u_pitch_ap_deadband', 'm_final_water_vy', 'm_final_water_vx', 'm_water_depth',
            'm_leakdetect_voltage', 'u_pitch_max_delta_battpos', 'm_coulomb_amphr', 'm_pitch',
        ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='slocum_model')
#        dsrc_model.model = 'SLOCUM'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(
            data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(
            external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(
            external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
#        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # create a temporary stream definition so the data product can create the stream
        pc_list = []
        for pc_k, pc in self._create_parameter_dictionary().iteritems():
            pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('slocum_param_dict', pc_list)

        streamdef_id = pubsub_cli.create_stream_definition(
            name="slocum_stream_def",
            description="stream def for slocum testing",
            parameter_dictionary_id=pdict_id)

        # Generate the data product and associate it to the ExternalDataset
        tdom, sdom = time_series_domain()
        tdom, sdom = tdom.dump(), sdom.dump()

        dprod = IonObject(RT.DataProduct,
                          name='slocum_parsed_product',
                          description='parsed slocum product',
                          temporal_domain=tdom,
                          spatial_domain=sdom)

        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
                                                   stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_ids, assn = rr_cli.find_objects(subject=dproduct_id,
                                               predicate=PRED.hasStream,
                                               object_type=RT.Stream,
                                               id_only=True)
        stream_id = stream_ids[0]

        log.info('Created resources: {0}'.format(
            {'ExternalDataset': ds_id,
             'ExternalDataProvider': ext_dprov_id,
             'DataSource': ext_dsrc_id,
             'DataSourceModel': ext_dsrc_model_id,
             'DataProducer': dproducer_id,
             'DataProduct': dproduct_id,
             'Stream': stream_id}))

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='slocum', stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': streamdef_id,
            'external_dataset_res': dset,
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 20,
        }
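        # --- Illustrative sketch (not part of the original test) ------------
        # How the date_extraction_pattern / date_pattern pair above resolves
        # a file name to a timestamp (self-contained, standard library only):
        #
        #   import re
        #   from datetime import datetime
        #
        #   name = 'ru05-2012-021-0-0-sbd.dat'
        #   m = re.match(r'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat', name)
        #   year, yday = m.groups()                       # ('2012', '021')
        #   dt = datetime.strptime('%s %s' % (year, yday), '%Y %j')
        #   # -> datetime(2012, 1, 21): day-of-year 021 of 2012
        # ----------------------------------------------------------------------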