def _start_platform(self):
    """
    Starts the given platform, waiting for it to transition to the
    UNINITIALIZED state (note that the agent starts in the LAUNCHING state).
    More concretely, the sequence of steps is:
    - prepares subscriber to receive the UNINITIALIZED state transition
    - launches the platform process
    - waits for the start of the process
    - waits for the transition to the UNINITIALIZED state
    """
    ##############################################################
    # prepare to receive the UNINITIALIZED state transition:
    async_res = AsyncResult()

    def consume_event(evt, *args, **kwargs):
        log.debug("Got ResourceAgentStateEvent %s from origin %r", evt.state, evt.origin)
        if evt.state == PlatformAgentState.UNINITIALIZED:
            async_res.set(evt)

    # start subscriber:
    sub = EventSubscriber(event_type="ResourceAgentStateEvent",
                          origin=self.platform_device_id,
                          callback=consume_event)
    sub.start()
    log.info("registered event subscriber to wait for state=%r from origin %r",
             PlatformAgentState.UNINITIALIZED, self.platform_device_id)
    #self._event_subscribers.append(sub)
    sub._ready_event.wait(timeout=EVENT_TIMEOUT)

    ##############################################################
    # now start the platform:
    agent_instance_id = self.platform_agent_instance_id
    log.debug("about to call start_platform_agent_instance with id=%s", agent_instance_id)
    pid = self.imsclient.start_platform_agent_instance(
        platform_agent_instance_id=agent_instance_id)
    log.debug("start_platform_agent_instance returned pid=%s", pid)

    #wait for start
    agent_instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 self.platform_device_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(gate.await(90),
                    "The platform agent instance did not spawn in 90 seconds")

    # Start a resource agent client to talk with the agent.
    self._pa_client = ResourceAgentClient(self.platform_device_id,
                                          name=gate.process_id,
                                          process=FakeProcess())
    log.debug("got platform agent client %s", str(self._pa_client))

    ##############################################################
    # wait for the UNINITIALIZED event:
    async_res.get(timeout=self._receive_timeout)
def poll_instrument_agent_instance(self, instrument_agent_instance_id, instrument_device_id):
    inst_agent_instance_obj = self.instrument_management.read_instrument_agent_instance(
        instrument_agent_instance_id)
    gate = AgentProcessStateGate(self.process_dispatcher.read_process,
                                 instrument_device_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(gate.await(30),
                    "The instrument agent instance (%s) did not spawn in 30 seconds" %
                    gate.process_id)
    return gate.process_id
def test_activate_rsn_vel3d(self):

    log.info("--------------------------------------------------------------------------------------------------------")
    # load_parameter_scenarios
    self.container.spawn_process(
        "Loader", "ion.processes.bootstrap.ion_loader", "IONLoader",
        config=dict(
            op="load",
            scenario="BETA",
            path="master",
            categories="ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
            clearcols="owner_id,org_ids",
            assets="res/preload/r2_ioc/ooi_assets",
            parseooi="True",
        ))

    self.loggerpids = []

    # Create InstrumentModel
    instModel_obj = IonObject(RT.InstrumentModel,
                              name='Vel3DMModel',
                              description="Vel3DMModel")
    instModel_id = self.imsclient.create_instrument_model(instModel_obj)
    log.debug('test_activate_rsn_vel3d new InstrumentModel id = %s ', instModel_id)

    raw_config = StreamConfiguration(stream_name='raw',
                                     parameter_dictionary_name='raw')
    vel3d_b_sample = StreamConfiguration(stream_name='vel3d_b_sample',
                                         parameter_dictionary_name='vel3d_b_sample')
    vel3d_b_engineering = StreamConfiguration(stream_name='vel3d_b_engineering',
                                              parameter_dictionary_name='vel3d_b_engineering')

    RSN_VEL3D_01 = {
        'DEV_ADDR': "10.180.80.6",
        'DEV_PORT': 2101,
        'DATA_PORT': 1026,
        'CMD_PORT': 1025,
        'PA_BINARY': "port_agent"
    }

    # Create InstrumentAgent
    instAgent_obj = IonObject(
        RT.InstrumentAgent,
        name='Vel3DAgent',
        description="Vel3DAgent",
        driver_uri="http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
        stream_configurations=[raw_config, vel3d_b_sample, vel3d_b_engineering])
    instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
    log.debug('test_activate_rsn_vel3d new InstrumentAgent id = %s', instAgent_id)

    self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

    # Create InstrumentDevice
    log.debug('test_activate_rsn_vel3d: Create instrument resource to represent the Vel3D ')
    instDevice_obj = IonObject(RT.InstrumentDevice,
                               name='Vel3DDevice',
                               description="Vel3DDevice",
                               serial_number="12345")
    instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
    self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
    log.debug("test_activate_rsn_vel3d: new InstrumentDevice id = %s ", instDevice_id)

    port_agent_config = {
        'device_addr': '10.180.80.6',
        'device_port': 2101,
        'process_type': PortAgentProcessType.UNIX,
        'binary_path': "port_agent",
        'port_agent_addr': 'localhost',
        'command_port': 1025,
        'data_port': 1026,
        'log_level': 5,
        'type': PortAgentType.ETHERNET
    }

    instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                      name='Vel3DAgentInstance',
                                      description="Vel3DAgentInstance",
                                      port_agent_config=port_agent_config,
                                      alerts=[])

    instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
        instAgentInstance_obj, instAgent_id, instDevice_id)

    parsed_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'vel3d_b_sample', id_only=True)
    parsed_sample_stream_def_id = self.pubsubcli.create_stream_definition(
        name='vel3d_b_sample', parameter_dictionary_id=parsed_sample_pdict_id)

    parsed_eng_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'vel3d_b_engineering', id_only=True)
    parsed_eng_stream_def_id = self.pubsubcli.create_stream_definition(
        name='vel3d_b_engineering', parameter_dictionary_id=parsed_eng_pdict_id)

    raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'raw', id_only=True)
    raw_stream_def_id = self.pubsubcli.create_stream_definition(
        name='raw', parameter_dictionary_id=raw_pdict_id)
    #-------------------------------
    # Create Raw and Parsed Data Products for the device
    #-------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='vel3d_b_sample',
                       description='vel3d_b_sample')
    sample_data_product_id = self.dpclient.create_data_product(
        data_product=dp_obj, stream_definition_id=parsed_sample_stream_def_id)
    log.debug('new dp_id = %s', sample_data_product_id)
    self.dpclient.activate_data_product_persistence(data_product_id=sample_data_product_id)

    self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                        data_product_id=sample_data_product_id)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.rrclient.find_objects(sample_data_product_id,
                                               PRED.hasStream, None, True)
    log.debug('sample_data_product streams1 = %s', stream_ids)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    dataset_ids, _ = self.rrclient.find_objects(sample_data_product_id,
                                                PRED.hasDataset, RT.Dataset, True)
    log.debug('Data set for sample_data_product = %s', dataset_ids[0])
    self.parsed_dataset = dataset_ids[0]

    pid = self.create_logger('vel3d_b_sample', stream_ids[0])
    self.loggerpids.append(pid)

    dp_obj = IonObject(RT.DataProduct,
                       name='vel3d_b_engineering',
                       description='vel3d_b_engineering')
    eng_data_product_id = self.dpclient.create_data_product(
        data_product=dp_obj, stream_definition_id=parsed_eng_stream_def_id)
    log.debug('new dp_id = %s', eng_data_product_id)
    self.dpclient.activate_data_product_persistence(data_product_id=eng_data_product_id)

    self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                        data_product_id=eng_data_product_id)

    dp_obj = IonObject(RT.DataProduct,
                       name='the raw data',
                       description='raw stream test')
    data_product_id2 = self.dpclient.create_data_product(
        data_product=dp_obj, stream_definition_id=raw_stream_def_id)
    log.debug('new dp_id = %s', data_product_id2)

    self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                        data_product_id=data_product_id2)
    self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.rrclient.find_objects(data_product_id2,
                                               PRED.hasStream, None, True)
    log.debug('test_activate_rsn_vel3d Data product streams2 = %s', str(stream_ids))

    # Retrieve the id of the OUTPUT stream from the out Data Product
    dataset_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                PRED.hasDataset, RT.Dataset, True)
    log.debug('test_activate_rsn_vel3d Data set for data_product_id2 = %s', dataset_ids[0])
    self.raw_dataset = dataset_ids[0]

    def start_instrument_agent():
        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

    gevent.joinall([gevent.spawn(start_instrument_agent)])

    #cleanup
    self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                    instrument_agent_instance_id=instAgentInstance_id)

    #wait for start
    inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
        instAgentInstance_id)
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 instDevice_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(gate.await(30),
                    "The instrument agent instance (%s) did not spawn in 30 seconds" %
                    gate.process_id)

    #log.trace('Instrument agent instance obj: = %s' , str(inst_agent_instance_obj))

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client = ResourceAgentClient(instDevice_id,
                                          to_name=gate.process_id,
                                          process=FakeProcess())

    def check_state(label, desired_state):
        actual_state = self._ia_client.get_agent_state()
        log.debug("%s instrument agent is in state '%s'", label, actual_state)
        self.assertEqual(desired_state, actual_state)

    log.debug("test_activate_rsn_vel3d: got ia client %s", str(self._ia_client))

    check_state("just-spawned", ResourceAgentState.UNINITIALIZED)

    cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
    retval = self._ia_client.execute_agent(cmd)
    log.debug("test_activate_rsn_vel3d: initialize %s", str(retval))
    check_state("initialized", ResourceAgentState.INACTIVE)

    log.debug("test_activate_rsn_vel3d Sending go_active command ")
    cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
    reply = self._ia_client.execute_agent(cmd)
    log.debug("test_activate_rsn_vel3d: return value from go_active %s", str(reply))
    check_state("activated", ResourceAgentState.IDLE)

    cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    retval = self._ia_client.execute_agent(cmd)
    state = retval.result
    log.debug("current state after sending go_active command %s", str(state))

    #
    cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    reply = self._ia_client.execute_agent(cmd)
    log.debug("test_activate_rsn_vel3d: run %s", str(reply))
    check_state("commanded", ResourceAgentState.COMMAND)

    cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    retval = self._ia_client.execute_agent(cmd)
    state = retval.result
    log.debug("current state after sending run command %s", str(state))

    # cmd = AgentCommand(command=ProtocolEvent.START_AUTOSAMPLE)
    # reply = self._ia_client.execute_agent(cmd)
    # log.debug("test_activate_rsn_vel3d: run %s" , str(reply))
    # state = self._ia_client.get_agent_state()
    # self.assertEqual(ResourceAgentState.COMMAND, state)
    #
    # gevent.sleep(5)
    #
    # cmd = AgentCommand(command=ProtocolEvent.STOP_AUTOSAMPLE)
    # reply = self._ia_client.execute_agent(cmd)
    # log.debug("test_activate_rsn_vel3d: run %s" , str(reply))
    # state = self._ia_client.get_agent_state()
    # self.assertEqual(ResourceAgentState.COMMAND, state)
    #
    # cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    # retval = self._ia_client.execute_agent(cmd)
    # state = retval.result
    # log.debug("current state after sending STOP_AUTOSAMPLE command %s" , str(state))
    #
    # cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
    # retval = self._ia_client.execute_agent(cmd)
    # state = self._ia_client.get_agent_state()
    # self.assertEqual(ResourceAgentState.STOPPED, state)
    #
    # cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
    # retval = self._ia_client.execute_agent(cmd)
    # state = self._ia_client.get_agent_state()
    # self.assertEqual(ResourceAgentState.COMMAND, state)
    #
    # cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
    # retval = self._ia_client.execute_agent(cmd)
    # state = self._ia_client.get_agent_state()
    # self.assertEqual(ResourceAgentState.IDLE, state)
    #
    # cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    # retval = self._ia_client.execute_agent(cmd)
    # state = self._ia_client.get_agent_state()
    # self.assertEqual(ResourceAgentState.COMMAND, state)

    log.debug("test_activate_rsn_vel3d: calling reset ")
    cmd = AgentCommand(command=ResourceAgentEvent.RESET)
    reply = self._ia_client.execute_agent(cmd)
    log.debug("test_activate_rsn_vel3d: return from reset %s", str(reply))

    #--------------------------------------------------------------------------------
    # Now get the data in one chunk using an RPC Call to start_retrieve
    #--------------------------------------------------------------------------------
    replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset)
    self.assertIsInstance(replay_data_raw, Granule)
    rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw)
    log.debug("RDT raw: %s", str(rdt_raw.pretty_print()))

    self.assertIn('raw', rdt_raw)
    raw_vals = rdt_raw['raw']

    #--------------------------------------------------------------------------------
    # Deactivate loggers
    #--------------------------------------------------------------------------------
    for pid in self.loggerpids:
        self.processdispatchclient.cancel_process(pid)

    self.dpclient.delete_data_product(sample_data_product_id)
    self.dpclient.delete_data_product(eng_data_product_id)
    self.dpclient.delete_data_product(data_product_id2)
def test_activateInstrumentSample(self):
    self.loggerpids = []

    # Create InstrumentModel
    instModel_obj = IonObject(RT.InstrumentModel,
                              name='SBE37IMModel',
                              description="SBE37IMModel")
    instModel_id = self.imsclient.create_instrument_model(instModel_obj)
    log.debug('new InstrumentModel id = %s ', instModel_id)

    raw_config = StreamConfiguration(stream_name='raw',
                                     parameter_dictionary_name='raw')
    parsed_config = StreamConfiguration(stream_name='parsed',
                                        parameter_dictionary_name='ctd_parsed_param_dict')

    # Create InstrumentAgent
    instAgent_obj = IonObject(RT.InstrumentAgent,
                              name='agent007',
                              description="SBE37IMAgent",
                              driver_uri=DRV_URI_GOOD,
                              stream_configurations=[raw_config, parsed_config])
    instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
    log.debug('new InstrumentAgent id = %s', instAgent_id)

    self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

    # Create InstrumentDevice
    log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ')
    instDevice_obj = IonObject(RT.InstrumentDevice,
                               name='SBE37IMDevice',
                               description="SBE37IMDevice",
                               serial_number="12345")
    instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
    self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
    log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ",
              instDevice_id)

    port_agent_config = {
        'device_addr': CFG.device.sbe37.host,
        'device_port': CFG.device.sbe37.port,
        'process_type': PortAgentProcessType.UNIX,
        'binary_path': "port_agent",
        'port_agent_addr': 'localhost',
        'command_port': CFG.device.sbe37.port_agent_cmd_port,
        'data_port': CFG.device.sbe37.port_agent_data_port,
        'log_level': 5,
        'type': PortAgentType.ETHERNET
    }

    instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                      name='SBE37IMAgentInstance',
                                      description="SBE37IMAgentInstance",
                                      port_agent_config=port_agent_config,
                                      alerts=[])

    instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
        instAgentInstance_obj, instAgent_id, instDevice_id)

    tdom, sdom = time_series_domain()
    sdom = sdom.dump()
    tdom = tdom.dump()

    parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'ctd_parsed_param_dict', id_only=True)
    parsed_stream_def_id = self.pubsubcli.create_stream_definition(
        name='parsed', parameter_dictionary_id=parsed_pdict_id)

    raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'raw', id_only=True)
    raw_stream_def_id = self.pubsubcli.create_stream_definition(
        name='raw', parameter_dictionary_id=raw_pdict_id)

    #-------------------------------
    # Create Raw and Parsed Data Products for the device
    #-------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='the parsed data',
                       description='ctd stream test',
                       temporal_domain=tdom,
                       spatial_domain=sdom)
    data_product_id1 = self.dpclient.create_data_product(
        data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
    log.debug('new dp_id = %s', data_product_id1)
    self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

    self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                        data_product_id=data_product_id1)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                               PRED.hasStream, None, True)
    log.debug('Data product streams1 = %s', stream_ids)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    dataset_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                PRED.hasDataset, RT.Dataset, True)
    log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
    self.parsed_dataset = dataset_ids[0]

    pid = self.create_logger('ctd_parsed', stream_ids[0])
    self.loggerpids.append(pid)

    dp_obj = IonObject(RT.DataProduct,
                       name='the raw data',
                       description='raw stream test',
                       temporal_domain=tdom,
                       spatial_domain=sdom)
    data_product_id2 = self.dpclient.create_data_product(
        data_product=dp_obj, stream_definition_id=raw_stream_def_id)
    log.debug('new dp_id = %s', data_product_id2)

    self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                        data_product_id=data_product_id2)
    self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)

    # setup notifications for the device and parsed data product
    user_id_1 = self._create_notification(user_name='user_1',
                                          instrument_id=instDevice_id,
                                          product_id=data_product_id1)
    #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users ---------
    user_id_2 = self._create_notification(user_name='user_2',
                                          instrument_id=instDevice_id,
                                          product_id=data_product_id2)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.rrclient.find_objects(data_product_id2,
                                               PRED.hasStream, None, True)
    log.debug('Data product streams2 = %s', str(stream_ids))

    # Retrieve the id of the OUTPUT stream from the out Data Product
    dataset_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                PRED.hasDataset, RT.Dataset, True)
    log.debug('Data set for data_product_id2 = %s', dataset_ids[0])
    self.raw_dataset = dataset_ids[0]

    #elastic search debug
    es_indexes, _ = self.container.resource_registry.find_resources(
        restype='ElasticSearchIndex')
    log.debug('ElasticSearch indexes: %s', [i.name for i in es_indexes])
    log.debug('Bootstrap %s', CFG.bootstrap.use_es)

    def start_instrument_agent():
        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

    gevent.joinall([gevent.spawn(start_instrument_agent)])

    #cleanup
    self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                    instrument_agent_instance_id=instAgentInstance_id)

    #wait for start
    inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
        instAgentInstance_id)
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 instDevice_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(gate.await(30),
                    "The instrument agent instance (%s) did not spawn in 30 seconds" %
                    gate.process_id)

    #log.trace('Instrument agent instance obj: = %s' , str(inst_agent_instance_obj))

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client = ResourceAgentClient(instDevice_id,
                                          to_name=gate.process_id,
                                          process=FakeProcess())
    log.debug("test_activateInstrumentSample: got ia client %s", str(self._ia_client))

    cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
    retval = self._ia_client.execute_agent(cmd)
    log.debug("test_activateInstrumentSample: initialize %s", str(retval))
    state = self._ia_client.get_agent_state()
    self.assertEqual(ResourceAgentState.INACTIVE, state)

    log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
    cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
    reply = self._ia_client.execute_agent(cmd)
    log.debug("test_activateInstrument: return value from go_active %s", str(reply))
    state = self._ia_client.get_agent_state()
    self.assertEqual(ResourceAgentState.IDLE, state)

    cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    retval = self._ia_client.execute_agent(cmd)
    state = retval.result
    log.debug("(L4-CI-SA-RQ-334): current state after sending go_active command %s", str(state))

    cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    reply = self._ia_client.execute_agent(cmd)
    log.debug("test_activateInstrumentSample: run %s", str(reply))
    state = self._ia_client.get_agent_state()
    self.assertEqual(ResourceAgentState.COMMAND, state)

    cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
    retval = self._ia_client.execute_agent(cmd)
    state = self._ia_client.get_agent_state()
    self.assertEqual(ResourceAgentState.STOPPED, state)

    cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
    retval = self._ia_client.execute_agent(cmd)
    state = self._ia_client.get_agent_state()
    self.assertEqual(ResourceAgentState.COMMAND, state)

    cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
    retval = self._ia_client.execute_agent(cmd)
    state = self._ia_client.get_agent_state()
    self.assertEqual(ResourceAgentState.IDLE, state)

    cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    retval = self._ia_client.execute_agent(cmd)
    state = self._ia_client.get_agent_state()
    self.assertEqual(ResourceAgentState.COMMAND, state)

    cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE)
    for i in xrange(10):
        retval = self._ia_client.execute_resource(cmd)
        log.debug("test_activateInstrumentSample: return from sample %s", str(retval))

    log.debug("test_activateInstrumentSample: calling reset ")
    cmd = AgentCommand(command=ResourceAgentEvent.RESET)
    reply = self._ia_client.execute_agent(cmd)
    log.debug("test_activateInstrumentSample: return from reset %s", str(reply))

    #--------------------------------------------------------------------------------
    # Now get the data in one chunk using an RPC Call to start_retrieve
    #--------------------------------------------------------------------------------

    replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset)
    self.assertIsInstance(replay_data_raw, Granule)
    rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw)
    log.debug("RDT raw: %s", str(rdt_raw.pretty_print()))

    self.assertIn('raw', rdt_raw)
    raw_vals = rdt_raw['raw']

    all_raw = "".join(raw_vals)

    # look for 't' entered after a prompt -- ">t"
    t_commands = all_raw.count(">t")

    if 10 != t_commands:
        log.error("%s raw_vals: ", len(raw_vals))
        for i, r in enumerate(raw_vals):
            log.error("raw val %s: %s", i, [r])
        self.fail("Expected 10 't' strings in raw_vals, got %s" % t_commands)
    else:
        log.debug("%s raw_vals: ", len(raw_vals))
        for i, r in enumerate(raw_vals):
            log.debug("raw val %s: %s", i, [r])

    replay_data_parsed = self.dataretrieverclient.retrieve(self.parsed_dataset)
    self.assertIsInstance(replay_data_parsed, Granule)
    rdt_parsed = RecordDictionaryTool.load_from_granule(replay_data_parsed)
    log.debug("test_activateInstrumentSample: RDT parsed: %s", str(rdt_parsed.pretty_print()))
    self.assertIn('temp', rdt_parsed)
    temp_vals = rdt_parsed['temp']
    pressure_vals = rdt_parsed['pressure']
    if 10 != len(temp_vals):
        log.error("%s temp_vals: %s", len(temp_vals), temp_vals)
        self.fail("Expected 10 temp_vals, got %s" % len(temp_vals))

    log.debug("l4-ci-sa-rq-138")
    """
    Physical resource control shall be subject to policy

    Instrument management control capabilities shall be subject to policy

    The actor accessing the control capabilities must be authorized to send commands.

    note from maurice 2012-05-18: Talk to tim M to verify that this is policy.
    If it is then talk with Stephen to get an example of a policy test and use
    that to create a test stub that will be completed when we have instrument
    policies.

    Tim M: The "actor", aka observatory operator, will access the instrument through ION.
    """

    #--------------------------------------------------------------------------------
    # Get the extended data product to see if it contains the granules
    #--------------------------------------------------------------------------------
    extended_product = self.dpclient.get_data_product_extension(
        data_product_id=data_product_id1, user_id=user_id_1)

    def poller(extended_product):
        return len(extended_product.computed.user_notification_requests.value) == 1

    poll(poller, extended_product, timeout=30)

    self._check_computed_attributes_of_extended_product(
        expected_data_product_id=data_product_id1,
        extended_data_product=extended_product)

    #--------------------------------------------------------------------------------
    # Get the extended instrument
    #--------------------------------------------------------------------------------
    extended_instrument = self.imsclient.get_instrument_device_extension(
        instrument_device_id=instDevice_id, user_id=user_id_1)

    #--------------------------------------------------------------------------------
    # For the second user, check the extended data product and the extended instrument
    #--------------------------------------------------------------------------------
    extended_product = self.dpclient.get_data_product_extension(
        data_product_id=data_product_id2, user_id=user_id_2)
    self._check_computed_attributes_of_extended_product(
        expected_data_product_id=data_product_id2,
        extended_data_product=extended_product)

    #--------------------------------------------------------------------------------
    # Get the extended instrument
    #--------------------------------------------------------------------------------
    extended_instrument = self.imsclient.get_instrument_device_extension(
        instrument_device_id=instDevice_id, user_id=user_id_2)
    self._check_computed_attributes_of_extended_instrument(
        expected_instrument_device_id=instDevice_id,
        extended_instrument=extended_instrument)

    #--------------------------------------------------------------------------------
    # Deactivate loggers
    #--------------------------------------------------------------------------------
    for pid in self.loggerpids:
        self.processdispatchclient.cancel_process(pid)

    self.dpclient.delete_data_product(data_product_id1)
    self.dpclient.delete_data_product(data_product_id2)
def test_createTransformsThenActivateInstrument(self):
    self.loggerpids = []

    #-------------------------------------------------------------------------------------
    # Create InstrumentModel
    #-------------------------------------------------------------------------------------
    instModel_id = self._create_instrument_model()

    #-------------------------------------------------------------------------------------
    # Create InstrumentAgent
    #-------------------------------------------------------------------------------------
    instAgent_id = self._create_instrument_agent(instModel_id)

    #-------------------------------------------------------------------------------------
    # Create InstrumentDevice
    #-------------------------------------------------------------------------------------
    instDevice_id = self._create_instrument_device(instModel_id)

    #-------------------------------------------------------------------------------------
    # Create Instrument Agent Instance
    #-------------------------------------------------------------------------------------
    instAgentInstance_id = self._create_instrument_agent_instance(instAgent_id, instDevice_id)

    #-------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #-------------------------------------------------------------------------------------
    self._create_param_dicts()
    ctd_stream_def_id = self.pubsubclient.create_stream_definition(
        name='SBE37_CDM', parameter_dictionary_id=self.pdict_id)

    #-------------------------------------------------------------------------------------
    # Create two data products
    #-------------------------------------------------------------------------------------
    ctd_parsed_data_product = self._create_input_data_products(ctd_stream_def_id, instDevice_id)

    #-------------------------------------------------------------------------------------
    # Create data process definitions
    #-------------------------------------------------------------------------------------
    self._create_data_process_definitions()

    #-------------------------------------------------------------------------------------
    # L0 Conductivity - Temperature - Pressure: Output Data Products
    #-------------------------------------------------------------------------------------
    outgoing_stream_l0_conductivity_id, \
    outgoing_stream_l0_pressure_id, \
    outgoing_stream_l0_temperature_id = self._create_stream_definitions()

    self.out_prod_ids = self._create_l0_output_data_products(
        outgoing_stream_l0_conductivity_id,
        outgoing_stream_l0_pressure_id,
        outgoing_stream_l0_temperature_id)

    self.outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(
        name='L1_conductivity', parameter_dictionary_id=self.pdict_id)
    self.dataprocessclient.assign_stream_definition_to_data_process_definition(
        self.outgoing_stream_l1_conductivity_id,
        self.ctd_L1_conductivity_dprocdef_id,
        binding='conductivity')

    self.outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(
        name='L1_Pressure', parameter_dictionary_id=self.pdict_id)
    self.dataprocessclient.assign_stream_definition_to_data_process_definition(
        self.outgoing_stream_l1_pressure_id,
        self.ctd_L1_pressure_dprocdef_id,
        binding='pressure')

    self.outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(
        name='L1_Temperature', parameter_dictionary_id=self.pdict_id)
    self.dataprocessclient.assign_stream_definition_to_data_process_definition(
        self.outgoing_stream_l1_temperature_id,
        self.ctd_L1_temperature_dprocdef_id,
        binding='temperature')

    self._create_l1_out_data_products()
    self._create_l2_out_data_products()

    #-------------------------------------------------------------------------------------
    # L0 Conductivity - Temperature - Pressure: Create the data process
    #-------------------------------------------------------------------------------------
    ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
        data_process_definition_id=self.ctd_L0_all_dprocdef_id,
        in_data_product_ids=[ctd_parsed_data_product],
        out_data_product_ids=self.out_prod_ids)
    self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)

    data_process = self.rrclient.read(ctd_l0_all_data_process_id)

    process_ids, _ = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id,
                                                predicate=PRED.hasProcess,
                                                id_only=True)
    self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

    extended_process = self.dataprocessclient.get_data_process_extension(
        ctd_l0_all_data_process_id)
    self.assertEquals(extended_process.computed.operational_state.status,
                      ComputedValueAvailability.NOTAVAILABLE)
    self.assertEquals(data_process.message_controllable, True)

    #-------------------------------------------------------------------------------------
    # L1 Conductivity: Create the data process
    #-------------------------------------------------------------------------------------
    l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(
        self.ctd_L1_conductivity_dprocdef_id,
        [self.ctd_l0_conductivity_output_dp_id],
        [self.ctd_l1_conductivity_output_dp_id])
    self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id)

    data_process = self.rrclient.read(l1_conductivity_data_process_id)
    process_ids, _ = self.rrclient.find_objects(subject=l1_conductivity_data_process_id,
                                                predicate=PRED.hasProcess,
                                                id_only=True)
    self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

    #-------------------------------------------------------------------------------------
    # L1 Pressure: Create the data process
    #-------------------------------------------------------------------------------------
    l1_pressure_data_process_id = self.dataprocessclient.create_data_process(
        self.ctd_L1_pressure_dprocdef_id,
        [self.ctd_l0_pressure_output_dp_id],
        [self.ctd_l1_pressure_output_dp_id])
    self.dataprocessclient.activate_data_process(l1_pressure_data_process_id)

    data_process = self.rrclient.read(l1_pressure_data_process_id)
    process_ids, _ = self.rrclient.find_objects(subject=l1_pressure_data_process_id,
                                                predicate=PRED.hasProcess,
                                                id_only=True)
    self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

    #-------------------------------------------------------------------------------------
    # L1 Temperature: Create the data process
    #-------------------------------------------------------------------------------------
    l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(
        self.ctd_L1_temperature_dprocdef_id,
        [self.ctd_l0_temperature_output_dp_id],
        [self.ctd_l1_temperature_output_dp_id])
    self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id)

    data_process = self.rrclient.read(l1_temperature_all_data_process_id)
    process_ids, _ = self.rrclient.find_objects(subject=l1_temperature_all_data_process_id,
                                                predicate=PRED.hasProcess,
                                                id_only=True)
    self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

    #-------------------------------------------------------------------------------------
    # L2 Salinity: Create the data process
    #-------------------------------------------------------------------------------------
    l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(
        self.ctd_L2_salinity_dprocdef_id,
        [ctd_parsed_data_product],
        [self.ctd_l2_salinity_output_dp_id])
    self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id)

    data_process = self.rrclient.read(l2_salinity_all_data_process_id)
    process_ids, _ = self.rrclient.find_objects(subject=l2_salinity_all_data_process_id,
                                                predicate=PRED.hasProcess,
                                                id_only=True)
    self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

    #-------------------------------------------------------------------------------------
    # L2 Density: Create the data process
    #-------------------------------------------------------------------------------------
    l2_density_all_data_process_id = self.dataprocessclient.create_data_process(
        self.ctd_L2_density_dprocdef_id,
        [ctd_parsed_data_product],
        [self.ctd_l2_density_output_dp_id])
    self.dataprocessclient.activate_data_process(l2_density_all_data_process_id)

    data_process = self.rrclient.read(l2_density_all_data_process_id)
    process_ids, _ = self.rrclient.find_objects(subject=l2_density_all_data_process_id,
                                                predicate=PRED.hasProcess,
                                                id_only=True)
    self.addCleanup(self.processdispatchclient.cancel_process, process_ids[0])

    #-------------------------------------------------------------------------------------
    # Launch InstrumentAgentInstance, connect to the resource agent client
    #-------------------------------------------------------------------------------------
    self.imsclient.start_instrument_agent_instance(
        instrument_agent_instance_id=instAgentInstance_id)
    self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                    instrument_agent_instance_id=instAgentInstance_id)

    inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
        instAgentInstance_id)

    # Wait for instrument agent to spawn
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 instDevice_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(gate.await(15),
                    "The instrument agent instance did not spawn in 15 seconds")

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client = ResourceAgentClient(instDevice_id,
                                          to_name=gate.process_id,
                                          process=FakeProcess())

    #-------------------------------------------------------------------------------------
    # Streaming
    #-------------------------------------------------------------------------------------
    cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
    reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
    reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    retval = self._ia_client.execute_agent(cmd)
    state = retval.result
    log.debug("(L4-CI-SA-RQ-334): current state after sending go_active command %s", str(state))
    self.assertTrue(state, 'DRIVER_STATE_COMMAND')

    cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    #todo ResourceAgentClient no longer has method set_param
    # # Make sure the sampling rate and transmission are sane.
    # params = {
    #     SBE37Parameter.NAVG : 1,
    #     SBE37Parameter.INTERVAL : 5,
    #     SBE37Parameter.TXREALTIME : True
    # }
    # self._ia_client.set_param(params)

    #todo There is no ResourceAgentEvent attribute for go_streaming... so what should be the command for it?
    cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
    retval = self._ia_client.execute_resource(cmd)

    # This gevent sleep is there to test the autosample time, which will show something
    # different from default only if the instrument runs for over a minute
    gevent.sleep(90)

    extended_instrument = self.imsclient.get_instrument_device_extension(
        instrument_device_id=instDevice_id)

    self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue)

    autosample_string = extended_instrument.computed.uptime.value
    autosampling_time = int(autosample_string.split()[4])

    self.assertTrue(autosampling_time > 0)

    cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)
    retval = self._ia_client.execute_resource(cmd)

    #todo There is no ResourceAgentEvent attribute for go_observatory... so what should be the command for it?
    # log.debug("test_activateInstrumentStream: calling go_observatory")
    # cmd = AgentCommand(command='go_observatory')
    # reply = self._ia_client.execute_agent(cmd)
    # cmd = AgentCommand(command='get_current_state')
    # retval = self._ia_client.execute_agent(cmd)
    # state = retval.result
    # log.debug("test_activateInstrumentStream: return from go_observatory state %s", str(state))

    cmd = AgentCommand(command=ResourceAgentEvent.RESET)
    reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    #-------------------------------------------------------------------------------------------------
    # Cleanup processes
    #-------------------------------------------------------------------------------------------------
    for pid in self.loggerpids:
        self.processdispatchclient.cancel_process(pid)

    #--------------------------------------------------------------------------------
    # Cleanup data products
    #--------------------------------------------------------------------------------
    dp_ids, _ = self.rrclient.find_resources(restype=RT.DataProduct, id_only=True)
    for dp_id in dp_ids:
        self.dataproductclient.delete_data_product(dp_id)
def test_resource_state_save_restore(self):

    # Create InstrumentModel
    instModel_obj = IonObject(RT.InstrumentModel,
                              name='SBE37IMModel',
                              description="SBE37IMModel")
    instModel_id = self.IMS.create_instrument_model(instModel_obj)
    log.debug('new InstrumentModel id = %s ', instModel_id)

    # Create InstrumentAgent
    raw_config = StreamConfiguration(stream_name='raw',
                                     parameter_dictionary_name='ctd_raw_param_dict')
    parsed_config = StreamConfiguration(stream_name='parsed',
                                        parameter_dictionary_name='ctd_parsed_param_dict')
    instAgent_obj = IonObject(RT.InstrumentAgent,
                              name='agent007',
                              description="SBE37IMAgent",
                              driver_uri=DRV_URI_GOOD,
                              stream_configurations=[raw_config, parsed_config])
    instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
    log.debug('new InstrumentAgent id = %s', instAgent_id)

    self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

    # Create InstrumentDevice
    log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 ' +
              '(SA Req: L4-CI-SA-RQ-241) ')
    instDevice_obj = IonObject(RT.InstrumentDevice,
                               name='SBE37IMDevice',
                               description="SBE37IMDevice",
                               serial_number="12345")
    instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
    self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

    log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ",
              instDevice_id)

    port_agent_config = {
        'device_addr': CFG.device.sbe37.host,
        'device_port': CFG.device.sbe37.port,
        'process_type': PortAgentProcessType.UNIX,
        'binary_path': "port_agent",
        'port_agent_addr': 'localhost',
        'command_port': CFG.device.sbe37.port_agent_cmd_port,
        'data_port': CFG.device.sbe37.port_agent_data_port,
        'log_level': 5,
        'type': PortAgentType.ETHERNET
    }

    instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                      name='SBE37IMAgentInstance',
                                      description="SBE37IMAgentInstance",
                                      port_agent_config=port_agent_config)

    instAgentInstance_id = self.IMS.create_instrument_agent_instance(
        instAgentInstance_obj, instAgent_id, instDevice_id)

    spdict_id = self.DSC.read_parameter_dictionary_by_name(
        'ctd_parsed_param_dict', id_only=True)
    parsed_stream_def_id = self.PSC.create_stream_definition(
        name='parsed', parameter_dictionary_id=spdict_id)

    rpdict_id = self.DSC.read_parameter_dictionary_by_name(
        'ctd_raw_param_dict', id_only=True)
    raw_stream_def_id = self.PSC.create_stream_definition(
        name='raw', parameter_dictionary_id=rpdict_id)

    #-------------------------------
    # Create Raw and Parsed Data Products for the device
    #-------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='the parsed data',
                       description='ctd stream test')
    data_product_id1 = self.DP.create_data_product(
        data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
    log.debug('new dp_id = %s', data_product_id1)

    self.DAMS.assign_data_product(input_resource_id=instDevice_id,
                                  data_product_id=data_product_id1)
    self.DP.activate_data_product_persistence(data_product_id=data_product_id1)
    self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id1)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
    log.debug('Data product streams1 = %s', stream_ids)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
    log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
    self.parsed_dataset = dataset_ids[0]

    #create the datastore at the beginning of each int test that persists data
    dp_obj = IonObject(RT.DataProduct,
                       name='the raw data',
                       description='raw stream test')
    data_product_id2 = self.DP.create_data_product(
        data_product=dp_obj, stream_definition_id=raw_stream_def_id)
    log.debug('new dp_id = %s', str(data_product_id2))

    self.DAMS.assign_data_product(input_resource_id=instDevice_id,
                                  data_product_id=data_product_id2)
    self.DP.activate_data_product_persistence(data_product_id=data_product_id2)
    self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id2)

    # spin up agent
    self.IMS.start_instrument_agent_instance(
        instrument_agent_instance_id=instAgentInstance_id)
    self.addCleanup(self.IMS.stop_instrument_agent_instance,
                    instrument_agent_instance_id=instAgentInstance_id)

    #wait for start
    instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
    gate = AgentProcessStateGate(self.PDC.read_process,
                                 instDevice_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(gate.await(30),
                    "The instrument agent instance (%s) did not spawn in 30 seconds" %
                    gate.process_id)

    # take snapshot of config
    snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
    snap_obj = self.RR.read_attachment(snap_id, include_content=True)

    #modify config
    instance_obj.driver_config["comms_config"] = "BAD_DATA"
    self.RR.update(instance_obj)

    #restore config
    self.IMS.restore_resource_state(instDevice_id, snap_id)
    instance_obj = self.RR.read(instAgentInstance_id)

    if "BAD_DATA" == instance_obj.driver_config["comms_config"]:
        print "Saved config:"
        print snap_obj.content
        self.fail("Saved config was not properly restored")

    self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])

    self.DP.delete_data_product(data_product_id1)
    self.DP.delete_data_product(data_product_id2)
def test_deploy_activate_full(self):

    # ensure no processes or pids are left around by agents or Sims
    #self.cleanupprocs()

    self.loggerpids = []

    #-------------------------------
    # Create InstrumentModel
    #-------------------------------
    instModel_obj = IonObject(RT.InstrumentModel,
                              name='SBE37IMModel',
                              description="SBE37IMModel")
    try:
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
    except BadRequest as ex:
        self.fail("failed to create new InstrumentModel: %s" % ex)

    #-------------------------------
    # Create InstrumentAgent
    #-------------------------------
    instAgent_obj = IonObject(RT.InstrumentAgent,
                              name='agent007',
                              description="SBE37IMAgent",
                              driver_uri=DRV_URI_GOOD)
    try:
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
    except BadRequest as ex:
        self.fail("failed to create new InstrumentAgent: %s" % ex)
    log.debug('new InstrumentAgent id = %s', instAgent_id)

    self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

    #-------------------------------
    # Create Instrument Site
    #-------------------------------
    instrumentSite_obj = IonObject(RT.InstrumentSite,
                                   name='instrumentSite1',
                                   description="SBE37IMInstrumentSite")
    try:
        instrumentSite_id = self.omsclient.create_instrument_site(
            instrument_site=instrumentSite_obj, parent_id='')
    except BadRequest as ex:
        self.fail("failed to create new InstrumentSite: %s" % ex)
    print 'test_deployAsPrimaryDevice: new instrumentSite id = ', instrumentSite_id

    self.omsclient.assign_instrument_model_to_instrument_site(instModel_id, instrumentSite_id)

    #-------------------------------
    # Logical Transform: Output Data Products
    #-------------------------------

    # Construct temporal and spatial Coordinate Reference System objects
    tdom, sdom = time_series_domain()
    sdom = sdom.dump()
    tdom = tdom.dump()

    parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'ctd_parsed_param_dict', id_only=True)
    parsed_stream_def_id = self.pubsubclient.create_stream_definition(
        name='parsed', parameter_dictionary_id=parsed_pdict_id)

    raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'ctd_raw_param_dict', id_only=True)
    raw_stream_def_id = self.pubsubclient.create_stream_definition(
        name='raw', parameter_dictionary_id=raw_pdict_id)

    #-------------------------------
    # Create Old InstrumentDevice
    #-------------------------------
    instDevice_obj = IonObject(
        RT.InstrumentDevice,
        name='SBE37IMDeviceYear1',
        description="SBE37IMDevice for the FIRST year of deployment",
        serial_number="12345")
    try:
        oldInstDevice_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, oldInstDevice_id)
    except BadRequest as ex:
        self.fail("failed to create new InstrumentDevice: %s" % ex)
    print 'test_deployAsPrimaryDevice: new Year 1 InstrumentDevice id = ', oldInstDevice_id

    self.rrclient.execute_lifecycle_transition(oldInstDevice_id, LCE.DEPLOY)
    self.rrclient.execute_lifecycle_transition(oldInstDevice_id, LCE.ENABLE)

    #-------------------------------
    # Create Raw and Parsed Data Products for the device
    #-------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='SiteDataProduct',
                       description='SiteDataProduct',
                       temporal_domain=tdom,
                       spatial_domain=sdom)

    instrument_site_output_dp_id = self.dataproductclient.create_data_product(
        data_product=dp_obj, stream_definition_id=parsed_stream_def_id)

    self.damsclient.assign_data_product(
        input_resource_id=oldInstDevice_id,
        data_product_id=instrument_site_output_dp_id)
    #self.dataproductclient.activate_data_product_persistence(data_product_id=instrument_site_output_dp_id)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.rrclient.find_objects(instrument_site_output_dp_id,
                                               PRED.hasStream, None, True)
    log.debug('Data product streams1 = %s', stream_ids)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    dataset_ids, _ = self.rrclient.find_objects(instrument_site_output_dp_id,
                                                PRED.hasDataset, RT.Dataset, True)
    log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
    self.parsed_dataset = dataset_ids[0]

    pid = self.create_logger('ctd_parsed', stream_ids[0])
    self.loggerpids.append(pid)

    #-------------------------------
    # Create Old Deployment
    #-------------------------------
    deployment_obj = IonObject(RT.Deployment, name='first deployment')
    oldDeployment_id = self.omsclient.create_deployment(deployment_obj)

    # deploy this device to the logical slot
    self.imsclient.deploy_instrument_device(oldInstDevice_id, oldDeployment_id)
    self.omsclient.deploy_instrument_site(instrumentSite_id, oldDeployment_id)

    #-------------------------------
    # Create InstrumentAgentInstance for OldInstrumentDevice to hold configuration information
    # cmd_port=5556, evt_port=5557, comms_method="ethernet", comms_device_address=CFG.device.sbe37.host, comms_device_port=CFG.device.sbe37.port,
    #-------------------------------
    port_agent_config = {
        'device_addr': CFG.device.sbe37.host,
        'device_port': CFG.device.sbe37.port,
        'process_type': PortAgentProcessType.UNIX,
        'binary_path': "port_agent",
        'port_agent_addr': 'localhost',
        'command_port': CFG.device.sbe37.port_agent_cmd_port,
        'data_port': CFG.device.sbe37.port_agent_data_port,
        'log_level': 5,
        'type': PortAgentType.ETHERNET
    }

    raw_config = StreamConfiguration(stream_name='raw',
                                     parameter_dictionary_name='ctd_raw_param_dict')
    parsed_config = StreamConfiguration(stream_name='parsed',
                                        parameter_dictionary_name='ctd_parsed_param_dict')

    instAgentInstance_obj = IonObject(
        RT.InstrumentAgentInstance,
        name='SBE37IMAgentInstanceYear1',
        description="SBE37IMAgentInstanceYear1",
        port_agent_config=port_agent_config,
        stream_configurations=[raw_config, parsed_config])

    oldInstAgentInstance_id = self.imsclient.create_instrument_agent_instance(
        instAgentInstance_obj, instAgent_id, oldInstDevice_id)

    tdom, sdom = time_series_domain()
    sdom = sdom.dump()
    tdom = tdom.dump()

    #-------------------------------
    # Create CTD Parsed as the Year 1 data product and attach to instrument
    #-------------------------------
    print 'Creating new CDM data product with a stream definition'

    dp_obj = IonObject(RT.DataProduct,
                       name='ctd_parsed_year1',
                       description='ctd stream test year 1',
                       temporal_domain=tdom,
                       spatial_domain=sdom)

    ctd_parsed_data_product_year1 = self.dataproductclient.create_data_product(
        data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
    print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product_year1

    self.damsclient.assign_data_product(
        input_resource_id=oldInstDevice_id,
        data_product_id=ctd_parsed_data_product_year1)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_year1,
                                               PRED.hasStream, None, True)
    print 'test_deployAsPrimaryDevice: Data product streams1 = ', stream_ids

    #-------------------------------
    # Create New InstrumentDevice
    #-------------------------------
    instDevice_obj_2 = IonObject(
        RT.InstrumentDevice,
        name='SBE37IMDeviceYear2',
        description="SBE37IMDevice for the SECOND year of deployment",
        serial_number="67890")
    try:
        newInstDevice_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj_2)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, newInstDevice_id)
    except BadRequest as ex:
        self.fail("failed to create new InstrumentDevice: %s" % ex)
    print 'test_deployAsPrimaryDevice: new Year 2 InstrumentDevice id = ', newInstDevice_id

    #set the LCSTATE
    self.rrclient.execute_lifecycle_transition(newInstDevice_id, LCE.DEPLOY)
    self.rrclient.execute_lifecycle_transition(newInstDevice_id, LCE.ENABLE)

    instDevice_obj_2 = self.rrclient.read(newInstDevice_id)
    log.debug("test_deployAsPrimaryDevice: Create New InstrumentDevice LCSTATE: %s ",
              str(instDevice_obj_2.lcstate))

    #-------------------------------
    # Create Old Deployment
    #-------------------------------
    deployment_obj = IonObject(RT.Deployment, name='second deployment')
    newDeployment_id = self.omsclient.create_deployment(deployment_obj)

    # deploy this device to the logical slot
    self.imsclient.deploy_instrument_device(newInstDevice_id, newDeployment_id)
    self.omsclient.deploy_instrument_site(instrumentSite_id, newDeployment_id)

    #-------------------------------
    # Create InstrumentAgentInstance for NewInstrumentDevice to hold configuration information
    #-------------------------------
    port_agent_config = {
        'device_addr': 'sbe37-simulator.oceanobservatories.org',
        'device_port': 4004,
        'process_type': PortAgentProcessType.UNIX,
        'binary_path': "port_agent",
        'port_agent_addr': 'localhost',
        'command_port': 4005,
        'data_port': 4006,
        'log_level': 5,
        'type': PortAgentType.ETHERNET
    }

    instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                      name='SBE37IMAgentInstanceYear2',
                                      description="SBE37IMAgentInstanceYear2",
                                      port_agent_config=port_agent_config)

    newInstAgentInstance_id = self.imsclient.create_instrument_agent_instance(
        instAgentInstance_obj, instAgent_id, newInstDevice_id)

    #-------------------------------
    # Create CTD Parsed as the Year 2 data product
    #-------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='ctd_parsed_year2',
                       description='ctd stream test year 2',
                       temporal_domain=tdom,
                       spatial_domain=sdom)

    ctd_parsed_data_product_year2 = self.dataproductclient.create_data_product(
        data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
    print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product_year2

    self.damsclient.assign_data_product(
        input_resource_id=newInstDevice_id,
        data_product_id=ctd_parsed_data_product_year2)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_year2,
                                               PRED.hasStream, None, True)
    print 'test_deployAsPrimaryDevice: Data product streams2 = ', stream_ids

    #-------------------------------
    # L0 Conductivity - Temperature - Pressure: Data Process Definition
    #-------------------------------
    log.debug("test_deployAsPrimaryDevice: create data process definition ctd_L0_all")
    dpd_obj = IonObject(
        RT.DataProcessDefinition,
        name='ctd_L0_all',
        description='transform ctd package into three separate L0 streams',
        module='ion.processes.data.transforms.ctd.ctd_L0_all',
        class_name='ctd_L0_all')
    try:
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
    except BadRequest as ex:
        self.fail("failed to create new ctd_L0_all data process definition: %s" % ex)

    #-------------------------------
    # L0 Conductivity - Temperature - Pressure: Output Data Products
    #-------------------------------
    outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(
        name='L0_Conductivity', parameter_dictionary_id=parsed_pdict_id)
    self.dataprocessclient.assign_stream_definition_to_data_process_definition(
        outgoing_stream_l0_conductivity_id,
        ctd_L0_all_dprocdef_id,
        binding='conductivity')

    outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(
        name='L0_Pressure', parameter_dictionary_id=parsed_pdict_id)
    self.dataprocessclient.assign_stream_definition_to_data_process_definition(
        outgoing_stream_l0_pressure_id,
        ctd_L0_all_dprocdef_id,
        binding='pressure')

    outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(
        name='L0_Temperature', parameter_dictionary_id=parsed_pdict_id)
    self.dataprocessclient.assign_stream_definition_to_data_process_definition(
        outgoing_stream_l0_temperature_id,
        ctd_L0_all_dprocdef_id,
        binding='temperature')

    self.out_prod_dict = {}

    log.debug("test_deployAsPrimaryDevice: create output data product L0 conductivity")
    ctd_l0_conductivity_output_dp_obj = IonObject(
        RT.DataProduct,
        name='L0_Conductivity',
        description='transform output conductivity',
        temporal_domain=tdom,
        spatial_domain=sdom)
    ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(
        data_product=ctd_l0_conductivity_output_dp_obj,
        stream_definition_id=parsed_stream_def_id)
    self.out_prod_dict['conductivity'] = ctd_l0_conductivity_output_dp_id
    #self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id)

    log.debug("test_deployAsPrimaryDevice: create output data product L0 pressure")
    ctd_l0_pressure_output_dp_obj = IonObject(
        RT.DataProduct,
        name='L0_Pressure',
        description='transform output pressure',
        temporal_domain=tdom,
        spatial_domain=sdom)
    ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(
        data_product=ctd_l0_pressure_output_dp_obj,
        stream_definition_id=parsed_stream_def_id)
    self.out_prod_dict['pressure'] = ctd_l0_pressure_output_dp_id
    #self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id)

    log.debug("test_deployAsPrimaryDevice: create output data product L0 temperature")
    ctd_l0_temperature_output_dp_obj = IonObject(
        RT.DataProduct,
        name='L0_Temperature',
        description='transform output temperature',
        temporal_domain=tdom,
        spatial_domain=sdom)
    ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(
        data_product=ctd_l0_temperature_output_dp_obj,
        stream_definition_id=parsed_stream_def_id)
    self.out_prod_dict['temperature'] = ctd_l0_temperature_output_dp_id
    #self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id)

    #-------------------------------
    # L0 Conductivity - Temperature - Pressure: Create the data process, listening to Sim1 (later: logical instrument output product)
    #-------------------------------
    log.debug("test_deployAsPrimaryDevice: create L0 all data_process start")
    try:
        out_data_products = self.out_prod_dict.values()
        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
            ctd_L0_all_dprocdef_id,
            [ctd_parsed_data_product_year1],
            out_data_products)
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
    except BadRequest as ex:
        self.fail("failed to create new data process: %s" % ex)
    log.debug("test_deployAsPrimaryDevice: create L0 all data_process return")

    #--------------------------------
    # Activate the deployment
    #--------------------------------
    self.omsclient.activate_deployment(oldDeployment_id)
    #-------------------------------
    # Launch InstrumentAgentInstance Sim1, connect to the resource agent client
    #-------------------------------
    self.imsclient.start_instrument_agent_instance(
        instrument_agent_instance_id=oldInstAgentInstance_id)
    self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                    instrument_agent_instance_id=oldInstAgentInstance_id)

    # wait for start
    instance_obj = self.imsclient.read_instrument_agent_instance(
        oldInstAgentInstance_id)
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 oldInstDevice_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(
        gate.await(30),
        "The instrument agent instance (%s) did not spawn in 30 seconds" %
        gate.process_id)

    inst_agent1_instance_obj = self.imsclient.read_instrument_agent_instance(
        oldInstAgentInstance_id)
    print 'test_deployAsPrimaryDevice: Instrument agent instance obj: = ', inst_agent1_instance_obj

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client_sim1 = ResourceAgentClient('iaclient Sim1',
                                               name=gate.process_id,
                                               process=FakeProcess())
    print 'activate_instrument: got _ia_client_sim1 %s' % self._ia_client_sim1
    log.debug(" test_deployAsPrimaryDevice:: got _ia_client_sim1 %s",
              str(self._ia_client_sim1))

    #-------------------------------
    # Launch InstrumentAgentInstance Sim2, connect to the resource agent client
    #-------------------------------
    self.imsclient.start_instrument_agent_instance(
        instrument_agent_instance_id=newInstAgentInstance_id)
    self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                    instrument_agent_instance_id=newInstAgentInstance_id)

    # wait for start
    instance_obj = self.imsclient.read_instrument_agent_instance(
        newInstAgentInstance_id)
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 oldInstDevice_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(
        gate.await(30),
        "The instrument agent instance (%s) did not spawn in 30 seconds" %
        gate.process_id)

    inst_agent2_instance_obj = self.imsclient.read_instrument_agent_instance(
        newInstAgentInstance_id)
    print 'test_deployAsPrimaryDevice: Instrument agent instance obj: = ', inst_agent2_instance_obj

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client_sim2 = ResourceAgentClient('iaclient Sim2',
                                               name=gate.process_id,
                                               process=FakeProcess())
    print 'activate_instrument: got _ia_client_sim2 %s' % self._ia_client_sim2
    log.debug(" test_deployAsPrimaryDevice:: got _ia_client_sim2 %s",
              str(self._ia_client_sim2))

    #-------------------------------
    # Streaming Sim1 (old instrument)
    #-------------------------------
    cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
    retval = self._ia_client_sim1.execute_agent(cmd)
    log.debug("test_deployAsPrimaryDevice: initialize %s", str(retval))

    log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
    cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
    reply = self._ia_client_sim1.execute_agent(cmd)
    log.debug("test_deployAsPrimaryDevice: return value from go_active %s",
              str(reply))
    self.assertTrue(reply)

    cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    retval = self._ia_client_sim1.execute_agent(cmd)
    state = retval.result
    log.debug(
        "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
        str(state))

    cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    reply = self._ia_client_sim1.execute_agent(cmd)
    log.debug("test_deployAsPrimaryDevice: run %s", str(reply))

    gevent.sleep(2)

    cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
    retval = self._ia_client_sim1.execute_resource(cmd)
    log.debug("test_activateInstrumentSample: return from START_AUTOSAMPLE: %s",
              str(retval))

    #-------------------------------
    # Streaming Sim 2 (new instrument)
    #-------------------------------
    cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
    retval = self._ia_client_sim2.execute_agent(cmd)
    log.debug("test_deployAsPrimaryDevice: initialize_sim2 %s", str(retval))

    log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
    cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
    reply = self._ia_client_sim2.execute_agent(cmd)
    log.debug("test_deployAsPrimaryDevice: return value from go_active_sim2 %s",
              str(reply))

    cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    retval = self._ia_client_sim2.execute_agent(cmd)
    state = retval.result
    log.debug(
        "(L4-CI-SA-RQ-334): current state after sending go_active_sim2 command %s",
        str(state))

    cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    reply = self._ia_client_sim2.execute_agent(cmd)
    log.debug("test_deployAsPrimaryDevice: run %s", str(reply))

    gevent.sleep(2)

    cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
    retval = self._ia_client_sim2.execute_resource(cmd)
    log.debug(
        "test_activateInstrumentSample: return from START_AUTOSAMPLE_sim2: %s",
        str(retval))

    gevent.sleep(10)

    #-------------------------------
    # Shutdown Sim1 (old instrument)
    #-------------------------------
    cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)
    retval = self._ia_client_sim1.execute_resource(cmd)
    log.debug(
        "test_activateInstrumentSample: return from STOP_AUTOSAMPLE: %s",
        str(retval))

    log.debug("test_activateInstrumentSample: calling reset ")
    cmd = AgentCommand(command=ResourceAgentEvent.RESET)
    reply = self._ia_client_sim1.execute_agent(cmd)
    log.debug("test_activateInstrumentSample: return from reset %s",
              str(reply))
    time.sleep(5)

    #-------------------------------
    # Shutdown Sim2 (new instrument)
    #-------------------------------
    cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)
    retval = self._ia_client_sim2.execute_resource(cmd)
    log.debug(
        "test_activateInstrumentSample: return from STOP_AUTOSAMPLE_sim2: %s",
        str(retval))

    log.debug("test_activateInstrumentSample: calling reset_sim2 ")
    cmd = AgentCommand(command=ResourceAgentEvent.RESET)
    reply = self._ia_client_sim2.execute_agent(cmd)
    log.debug("test_activateInstrumentSample: return from reset_sim2 %s",
              str(reply))
    time.sleep(5)
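
# A possible refactoring sketch (not part of the original test): the launch / process-gate /
# client-creation pattern above is repeated for Sim1 and Sim2, and again in the alert tests
# below. A helper along these lines could factor it out; the method name is hypothetical, and
# it assumes the same self.imsclient, self.processdispatchclient, AgentProcessStateGate and
# FakeProcess fixtures already used throughout this module.
def _start_agent_and_get_client(self, agent_instance_id, device_id, timeout=30):
    # Launch the agent instance and register its shutdown for cleanup.
    self.imsclient.start_instrument_agent_instance(
        instrument_agent_instance_id=agent_instance_id)
    self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                    instrument_agent_instance_id=agent_instance_id)

    # Wait for the agent process associated with the device to reach RUNNING.
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 device_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(
        gate.await(timeout),
        "The instrument agent instance (%s) did not spawn in %s seconds" %
        (gate.process_id, timeout))

    # Return a resource agent client bound to the spawned agent process.
    return ResourceAgentClient(device_id,
                               name=gate.process_id,
                               process=FakeProcess())

# Example usage (hypothetical):
#   self._ia_client_sim1 = self._start_agent_and_get_client(oldInstAgentInstance_id,
#                                                           oldInstDevice_id)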
def test_alerts(self):
    #-------------------------------------------------------------------------------------
    # Create InstrumentModel
    #-------------------------------------------------------------------------------------
    instModel_id = self._create_instrument_model()

    #-------------------------------------------------------------------------------------
    # Create InstrumentAgent
    #-------------------------------------------------------------------------------------
    instAgent_id = self._create_instrument_agent(instModel_id)

    #-------------------------------------------------------------------------------------
    # Create InstrumentDevice
    #-------------------------------------------------------------------------------------
    instDevice_id = self._create_instrument_device(instModel_id)

    # It is necessary for the instrument device to be associated with at least one output data product
    tdom, sdom = time_series_domain()
    parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'ctd_parsed_param_dict', id_only=True)
    parsed_stream_def_id = self.pubsubclient.create_stream_definition(
        name='parsed', parameter_dictionary_id=parsed_pdict_id)

    raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'raw', id_only=True)
    raw_stream_def_id = self.pubsubclient.create_stream_definition(
        name='raw', parameter_dictionary_id=raw_pdict_id)

    # We are creating two data products here, one for parsed and another for raw
    dp_obj_parsed = IonObject(
        RT.DataProduct,
        name='parsed_data_product',
        description='Parsed output data product for instrument',
        temporal_domain=tdom.dump(),
        spatial_domain=sdom.dump())

    dp_obj_raw = IonObject(
        RT.DataProduct,
        name='raw_data_prod',
        description='Raw output data product for instrument',
        temporal_domain=tdom.dump(),
        spatial_domain=sdom.dump())

    parsed_out_data_prod_id = self.dataproductclient.create_data_product(
        data_product=dp_obj_parsed,
        stream_definition_id=parsed_stream_def_id)
    raw_out_data_prod_id = self.dataproductclient.create_data_product(
        data_product=dp_obj_raw,
        stream_definition_id=raw_stream_def_id)

    self.addCleanup(self.dataproductclient.delete_data_product,
                    parsed_out_data_prod_id)
    self.addCleanup(self.dataproductclient.delete_data_product,
                    raw_out_data_prod_id)

    self.dataproductclient.activate_data_product_persistence(
        data_product_id=parsed_out_data_prod_id)
    self.dataproductclient.activate_data_product_persistence(
        data_product_id=raw_out_data_prod_id)

    # todo: note that the generated config on the instrument will cover both the raw and parsed
    #       stream definitions, since the two data products constructed with them are associated
    #       with the instrument as output data products
    # todo: if the config is not generated for a stream definition, the instrument agent will
    #       complain when the simulator produces data for a stream definition that is missing
    #       from the stream config
    self.damsclient.assign_data_product(
        input_resource_id=instDevice_id,
        data_product_id=parsed_out_data_prod_id)
    self.damsclient.assign_data_product(
        input_resource_id=instDevice_id,
        data_product_id=raw_out_data_prod_id)
    log.debug("assigned instdevice id: %s to data product: %s",
              instDevice_id, raw_out_data_prod_id)

    #-------------------------------------------------------------------------------------
    # Create Instrument Agent Instance
    #-------------------------------------------------------------------------------------
    instAgentInstance_id = self._create_instrument_agent_instance(
        instAgent_id, instDevice_id)
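
    # Both output data products were assigned to the device before the agent instance is
    # launched below, so that (per the todo notes above) the generated agent stream
    # configuration covers both the parsed and raw stream definitions.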
    #-------------------------------------------------------------------------------------
    # Launch InstrumentAgentInstance, connect to the resource agent client
    #-------------------------------------------------------------------------------------
    self.imsclient.start_instrument_agent_instance(
        instrument_agent_instance_id=instAgentInstance_id)
    self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                    instrument_agent_instance_id=instAgentInstance_id)

    inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
        instAgentInstance_id)

    # Wait for instrument agent to spawn
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 instDevice_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(
        gate.await(15),
        "The instrument agent instance did not spawn in 15 seconds")

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client = ResourceAgentClient(instDevice_id,
                                          to_name=gate.process_id,
                                          process=FakeProcess())

    #-------------------------------------------------------------------------------------
    # Set up the subscriber to catch the alert event
    #-------------------------------------------------------------------------------------

    def callback_for_alert(event, *args, **kwargs):
        log.debug("caught an alert: %s", event)
        self.catch_alert.put(event)

    self.event_subscriber = EventSubscriber(
        event_type='DeviceStatusAlertEvent',
        origin=instDevice_id,
        callback=callback_for_alert)
    self.event_subscriber.start()
    self.addCleanup(self.event_subscriber.stop)

    #-------------------------------------------------------------------------------------
    # Running the instrument....
    #-------------------------------------------------------------------------------------
    cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
    reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
    reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    retval = self._ia_client.execute_agent(cmd)
    state = retval.result
    log.debug(
        "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
        str(state))
    self.assertEqual(state, 'DRIVER_STATE_COMMAND')

    cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
    retval = self._ia_client.execute_resource(cmd)

    got_bad_temp = False
    got_late_data = False
    runtime = 0
    starttime = time.time()
    caught_events = []

    while (got_bad_temp == False or got_late_data == False) and \
            runtime < 120:
        a = self.catch_alert.get(timeout=90)
        caught_events.append(a)

        if a.name == 'temperature_warning_interval' and \
           a.description == 'Alert triggered by out of range data values: temp ':
            got_bad_temp = True

        if a.name == 'late_data_warning' and \
           a.description == 'Expected data has not arrived.':
            got_late_data = True

        runtime = time.time() - starttime

    log.debug("caught_events: %s", [c.name for c in caught_events])

    for c in caught_events:
        self.assertIn(c.name,
                      ['temperature_warning_interval', 'late_data_warning'])
        self.assertEqual(c.origin, instDevice_id)
        self.assertEqual(c.type_, 'DeviceStatusAlertEvent')
        self.assertEqual(c.origin_type, 'InstrumentDevice')

    self.assertTrue(got_bad_temp)
    self.assertTrue(got_late_data)
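
# A minimal sketch (not part of the original test) of how the alert-polling loop above could be
# factored into a helper that drains self.catch_alert until a set of expected alert names has
# been seen or a deadline passes. The helper name is hypothetical; it assumes the same
# gevent-backed self.catch_alert queue fed by callback_for_alert, and uses gevent.queue.Empty,
# the exception raised when a get() times out.
def _collect_alerts(self, expected_names, max_runtime=120, get_timeout=90):
    from gevent.queue import Empty  # local import to keep this sketch self-contained

    seen = set()
    caught_events = []
    starttime = time.time()
    # Keep draining the queue until every expected alert name has been observed
    # or the overall deadline has passed.
    while not set(expected_names).issubset(seen) and \
            (time.time() - starttime) < max_runtime:
        try:
            event = self.catch_alert.get(timeout=get_timeout)
        except Empty:
            # No alert arrived within get_timeout; re-check the deadline and keep waiting.
            continue
        caught_events.append(event)
        seen.add(event.name)
    return caught_events

# Example usage (hypothetical):
#   events = self._collect_alerts(['temperature_warning_interval', 'late_data_warning'])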
def test_alerts(self):
    #
    # Test that with the 4009 sim we can get a late data alert,
    # as well as out-of-range alerts for > 25, > 50, and > 75,
    # and the ALL_CLEAR alerts for each of them.
    #

    #-------------------------------------------------------------------------------------
    # Create InstrumentModel
    #-------------------------------------------------------------------------------------
    instModel_id = self._create_instrument_model()

    #-------------------------------------------------------------------------------------
    # Create InstrumentAgent
    #-------------------------------------------------------------------------------------
    instAgent_id = self._create_instrument_agent(instModel_id)

    #-------------------------------------------------------------------------------------
    # Create InstrumentDevice
    #-------------------------------------------------------------------------------------
    instDevice_id = self._create_instrument_device(instModel_id)

    # It is necessary for the instrument device to be associated with at least one output data product
    parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'ctd_parsed_param_dict', id_only=True)
    parsed_stream_def_id = self.pubsubclient.create_stream_definition(
        name='parsed', parameter_dictionary_id=parsed_pdict_id)

    raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
        'raw', id_only=True)
    raw_stream_def_id = self.pubsubclient.create_stream_definition(
        name='raw', parameter_dictionary_id=raw_pdict_id)

    # We are creating two data products here, one for parsed and another for raw
    dp_obj_parsed = IonObject(
        RT.DataProduct,
        name='parsed_data_product',
        description='Parsed output data product for instrument')

    dp_obj_raw = IonObject(
        RT.DataProduct,
        name='raw_data_prod',
        description='Raw output data product for instrument')

    parsed_out_data_prod_id = self.dataproductclient.create_data_product(
        data_product=dp_obj_parsed,
        stream_definition_id=parsed_stream_def_id)
    raw_out_data_prod_id = self.dataproductclient.create_data_product(
        data_product=dp_obj_raw,
        stream_definition_id=raw_stream_def_id)

    self.addCleanup(self.dataproductclient.delete_data_product,
                    parsed_out_data_prod_id)
    self.addCleanup(self.dataproductclient.delete_data_product,
                    raw_out_data_prod_id)

    self.dataproductclient.activate_data_product_persistence(
        data_product_id=parsed_out_data_prod_id)
    self.dataproductclient.activate_data_product_persistence(
        data_product_id=raw_out_data_prod_id)

    # todo: note that the generated config on the instrument will cover both the raw and parsed
    #       stream definitions, since the two data products constructed with them are associated
    #       with the instrument as output data products
    # todo: if the config is not generated for a stream definition, the instrument agent will
    #       complain when the simulator produces data for a stream definition that is missing
    #       from the stream config
    self.damsclient.assign_data_product(
        input_resource_id=instDevice_id,
        data_product_id=parsed_out_data_prod_id)
    self.damsclient.assign_data_product(
        input_resource_id=instDevice_id,
        data_product_id=raw_out_data_prod_id)
    log.debug("assigned instdevice id: %s to data product: %s",
              instDevice_id, raw_out_data_prod_id)

    #-------------------------------------------------------------------------------------
    # Create Instrument Agent Instance
    #-------------------------------------------------------------------------------------
    instAgentInstance_id = self._create_instrument_agent_instance(
        instAgent_id, instDevice_id)
    #-------------------------------------------------------------------------------------
    # Launch InstrumentAgentInstance, connect to the resource agent client
    #-------------------------------------------------------------------------------------
    self.imsclient.start_instrument_agent_instance(
        instrument_agent_instance_id=instAgentInstance_id)
    self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                    instrument_agent_instance_id=instAgentInstance_id)

    inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
        instAgentInstance_id)

    # Wait for instrument agent to spawn
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 instDevice_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(
        gate.await(15),
        "The instrument agent instance did not spawn in 15 seconds")

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client = ResourceAgentClient(instDevice_id,
                                          to_name=gate.process_id,
                                          process=FakeProcess())

    #-------------------------------------------------------------------------------------
    # Set up the subscriber to catch the alert event
    #-------------------------------------------------------------------------------------

    def callback_for_alert(event, *args, **kwargs):
        log.debug("caught an alert: %s", event)
        self.catch_alert.put(event)

    self.event_subscriber = EventSubscriber(
        event_type='DeviceStatusAlertEvent',
        origin=instDevice_id,
        callback=callback_for_alert)
    self.event_subscriber.start()
    self.addCleanup(self.event_subscriber.stop)

    #-------------------------------------------------------------------------------------
    # Running the instrument....
    #-------------------------------------------------------------------------------------
    cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)

    # Prevent this test from hanging indefinitely until
    # OOIION-1313 is resolved
    timeout_val = 90
    with gevent.Timeout(timeout_val,
                        Exception('Agent failed to initialize after %fs' %
                                  timeout_val)):
        reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
    with gevent.Timeout(timeout_val,
                        Exception('Agent failed to go active after %fs' %
                                  timeout_val)):
        reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
    with gevent.Timeout(timeout_val,
                        Exception('Agent failed to get resource after %fs' %
                                  timeout_val)):
        retval = self._ia_client.execute_agent(cmd)
    state = retval.result
    log.debug(
        "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
        str(state))
    self.assertEqual(state, 'DRIVER_STATE_COMMAND')

    cmd = AgentCommand(command=ResourceAgentEvent.RUN)
    with gevent.Timeout(timeout_val,
                        Exception('Agent failed to run after %fs' %
                                  timeout_val)):
        reply = self._ia_client.execute_agent(cmd)
    self.assertTrue(reply.status == 0)

    cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)
    with gevent.Timeout(timeout_val,
                        Exception('Agent failed to start autosample after %fs' %
                                  timeout_val)):
        retval = self._ia_client.execute_resource(cmd)

    got_bad_temp = [False, False, False, False]
    got_late_data = False
    got_temp_all_clear = [False, False, False, False]
    runtime = 0
    starttime = time.time()
    caught_events = []

    while (got_bad_temp[0] == False or
           got_bad_temp[1] == False or
           got_bad_temp[2] == False or
           got_temp_all_clear[0] == False or
           got_temp_all_clear[1] == False or
           got_temp_all_clear[2] == False or
           got_late_data == False) and runtime < 120:

        a = self.catch_alert.get(timeout=180)
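        # Classify the alert below: a WARNING for one of the thresholds sets the matching
        # got_bad_temp flag, an ALL_CLEAR sets the matching got_temp_all_clear flag, and a
        # late_data_warning sets got_late_data.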
        caught_events.append(a)

        if a.name == 'temperature_warning_interval temp below 25':
            if a.sub_type == 'WARNING' and a.values[0] > 25:
                got_bad_temp[0] = True
                log.error(str(a.values[0]) + " should be above 25")
            elif a.sub_type == 'ALL_CLEAR':
                got_temp_all_clear[0] = True
                log.debug("25 ALL_CLEAR")

        if a.name == 'temperature_warning_interval temp below 50':
            if a.sub_type == 'WARNING' and a.values[0] > 50:
                got_bad_temp[1] = True
                log.error(str(a.values[0]) + " should be above 50")
            elif a.sub_type == 'ALL_CLEAR':
                got_temp_all_clear[1] = True
                log.debug("50 ALL_CLEAR")

        if a.name == 'temperature_warning_interval temp below 75':
            if a.sub_type == 'WARNING' and a.values[0] > 75:
                got_bad_temp[2] = True
                log.error(str(a.values[0]) + " should be above 75")
            elif a.sub_type == 'ALL_CLEAR':
                got_temp_all_clear[2] = True
                log.debug("75 ALL_CLEAR")

        if a.name == 'late_data_warning' and \
           a.description == 'Expected data has not arrived.':
            got_late_data = True
            log.debug("late value")

        runtime = time.time() - starttime

    log.debug("caught_events: %s", [c.name for c in caught_events])

    for c in caught_events:
        self.assertIn(c.name, [
            'temperature_warning_interval temp below 25',
            'temperature_warning_interval temp below 50',
            'temperature_warning_interval temp below 75',
            'late_data_warning'
        ])
        self.assertEqual(c.origin, instDevice_id)
        self.assertEqual(c.type_, 'DeviceStatusAlertEvent')
        self.assertEqual(c.origin_type, 'InstrumentDevice')

    self.assertTrue(got_bad_temp[0] and got_bad_temp[1] and got_bad_temp[2])

    # Simply log the state of the late data flag; the simulator does not seem to be consistent.
    log.debug("test_alerts late data alert flag: %s", got_late_data)
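
# A minimal sketch (not part of the original tests) of factoring out the repeated gevent.Timeout
# guard used above when executing agent commands, so OOIION-1313-style hangs fail fast with a
# descriptive message. The helper name and signature are hypothetical; it relies only on the
# gevent module already imported by this file and on the execute_agent / execute_resource calls
# used in these tests.
def _execute_with_timeout(self, execute_fn, cmd, what, timeout_val=90):
    # execute_fn is either client.execute_agent or client.execute_resource; raise a
    # descriptive Exception if the call does not return within timeout_val seconds.
    with gevent.Timeout(timeout_val,
                        Exception('Agent failed to %s after %fs' % (what, timeout_val))):
        return execute_fn(cmd)

# Example usage (hypothetical):
#   reply = self._execute_with_timeout(self._ia_client.execute_agent,
#                                      AgentCommand(command=ResourceAgentEvent.INITIALIZE),
#                                      'initialize')
#   self.assertTrue(reply.status == 0)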