def _setup_resources(self):
    stream_id = self.create_stream_and_logger(name='fibonacci_stream')
    tx = TaxyTool()
    tx.add_taxonomy_set('data', 'external_data')
    #TG: Build TaxonomyTool & add to dh_cfg.taxonomy
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'data_producer_id': 'fibonacci_data_producer_id',
        'taxonomy': tx.dump(),
        'max_records': 4,
    }
def _setup_resources(self):
    stream_id = self.create_stream_and_logger(name='dummydata_stream')
    tx = TaxyTool()
    tx.add_taxonomy_set('data', 'external_data')
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,  #TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
        'data_producer_id': 'dummy_data_producer_id',
        'taxonomy': tx.dump(),
        'max_records': 4,
    }
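# The same 'dh_cfg' shape recurs throughout these fixtures, but nothing in
# this section shows the consuming side. The sketch below is a guess at how a
# handler might read those keys, for orientation only -- the class name, the
# attribute names, and the key handling are all hypothetical, not the actual
# DataHandler API (TaxyTool is assumed imported as in the tests above).
class HypotheticalDataHandler(object):
    def __init__(self, dh_cfg):
        # 'TESTING' presumably switches the handler to generated test data
        self.testing = dh_cfg.get('TESTING', False)
        # 'max_records' presumably caps the records packed into each granule
        self.max_records = dh_cfg.get('max_records', 100)
        # 'taxonomy' arrives pre-serialized; rehydrate it before use
        self.taxonomy = TaxyTool.load(dh_cfg['taxonomy'])
        self.stream_id = dh_cfg['stream_id']
        self.data_producer_id = dh_cfg['data_producer_id']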
def test_yamlize(self):
    tc = TaxyTool()
    tc.add_taxonomy_set('1', 'a')
    tc.add_taxonomy_set('2', 'b')
    tc.extend_names_by_nick_name('1', 'x')
    tc.extend_names_by_anyname('a', 'z')
    tc.extend_names_by_anyname('b', 'c')

    s = tc.dump()
    tc2 = TaxyTool.load(s)

    #@todo - a list is not a set and the yaml dump/ion serialization can not handle sets...
    # The loaded copy must match what was dumped...
    self.assertEquals(tc2._cnt, 1)
    self.assertEquals(tc2.get_names_by_handle(0), {'1', 'x', 'a', 'z'})
    self.assertEquals(tc2.get_names_by_handle(1), {'2', 'b', 'c'})

    # ...and the round trip must leave the original tool unchanged.
    self.assertEquals(tc._cnt, 1)
    self.assertEquals(tc.get_names_by_handle(0), {'1', 'x', 'a', 'z'})
    self.assertEquals(tc.get_names_by_handle(1), {'2', 'b', 'c'})
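# The @todo in test_yamlize flags that the yaml/ion serialization layer cannot
# round-trip Python sets, so the dumped form presumably carries the name
# collections as lists. A minimal sketch of that workaround pattern,
# independent of TaxyTool (dump_names/load_names are illustrative helpers,
# not part of the API):
import yaml

def dump_names(names):
    # Serialize as a sorted list; lists round-trip where sets may not.
    return yaml.safe_dump(sorted(names))

def load_names(serialized):
    # Restore set semantics on the way back in.
    return set(yaml.safe_load(serialized))

assert load_names(dump_names({'1', 'x', 'a', 'z'})) == {'1', 'x', 'a', 'z'}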
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS'
    # Build the test resources for the dataset
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()

    eda = ExternalDatasetAgent()
    eda_id = dams_cli.create_external_dataset_agent(eda)
    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects
    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.name = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.name = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'ruv_test_dataset'
    dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
    dset.dataset_description.parameters['dataset_path'] = 'test_data/RDLi_SEAB_2011_08_24_1600.ruv'
    dset.dataset_description.parameters['temporal_dimension'] = None
    dset.dataset_description.parameters['zonal_dimension'] = None
    dset.dataset_description.parameters['meridional_dimension'] = None
    dset.dataset_description.parameters['vertical_dimension'] = None
    dset.dataset_description.parameters['variables'] = []

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='ruv_model')
    dsrc_model.model = 'RUV'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Generate the data product and associate it to the ExternalDataset
    dprod = DataProduct(name='ruv_parsed_product', description='parsed ruv product')
    dproduct_id = dpms_cli.create_data_product(data_product=dprod)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

    #CBM: Use CF standard_names
    ttool = TaxyTool()
    ttool.add_taxonomy_set('data', 'test data')
    #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

    # Create the logger for receiving publications
    self.create_stream_and_logger(name='ruv', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'external_dataset_res': dset,
        'taxonomy': ttool.dump(),
        'data_producer_id': dproducer_id,  #CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 20,
    }
def test_activateDatasetAgent(self):
    # Create ExternalDatasetModel
    datasetModel_obj = IonObject(RT.ExternalDatasetModel, name='ExampleDatasetModel', description="ExampleDatasetModel", datset_type="FibSeries")
    try:
        datasetModel_id = self.damsclient.create_external_dataset_model(datasetModel_obj)
    except BadRequest as ex:
        self.fail("failed to create new ExternalDatasetModel: %s" % ex)
    log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id))

    # Create ExternalDatasetAgent
    datasetAgent_obj = IonObject(RT.ExternalDatasetAgent, name='datasetagent007', description="datasetagent007", handler_module=EDA_MOD, handler_class=EDA_CLS)
    try:
        datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id)
    except BadRequest as ex:
        self.fail("failed to create new ExternalDatasetAgent: %s" % ex)
    log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id))

    # Create ExternalDataset
    log.debug('TestExternalDatasetAgentMgmt: Create external dataset resource')
    extDataset_obj = IonObject(RT.ExternalDataset, name='ExtDataset', description="ExtDataset")
    try:
        extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id)
    except BadRequest as ex:
        self.fail("failed to create new external dataset resource: %s" % ex)
    log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s", str(extDataset_id))

    # Register the dataset as a data producer
    dproducer_id = self.damsclient.register_external_data_set(extDataset_id)

    # Create a stream definition for the data from the ctd simulator
    ctd_stream_def = SBE37_CDM_stream_definition()
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def)
    log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id))

    log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition")
    dp_obj = IonObject(RT.DataProduct, name='eoi dataset data', description='stream test')
    try:
        data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)
    except BadRequest as ex:
        self.fail("failed to create new data product: %s" % ex)
    log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1))

    self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1)
    self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1, persist_data=True, persist_metadata=True)

    # Retrieve the id of the OUTPUT stream from the out Data Product
    stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
    log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids))
    stream_id = stream_ids[0]

    # Build a taxonomy for the dataset
    tx = TaxyTool()
    tx.add_taxonomy_set('data', 'external_data')

    # Augment the DVR_CONFIG with the necessary pieces
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,  #TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
        'data_producer_id': dproducer_id,
        # 'external_dataset_res': extDataset_obj,  # Not needed - retrieved by EDA based on resource_id
        'taxonomy': tx.dump(),  #TODO: Currently does not support sets
        'max_records': 4,
    }

    # Create agent config.
    self._stream_config = {}
    agent_config = {
        'driver_config': self.DVR_CONFIG,
        'stream_config': self._stream_config,
        'agent': {'resource_id': EDA_RESOURCE_ID},
        'test_mode': True
    }

    extDatasetAgentInstance_obj = IonObject(RT.ExternalDatasetAgentInstance, name='DatasetAgentInstance', description="DatasetAgentInstance", dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=agent_config)
    extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(external_dataset_agent_instance=extDatasetAgentInstance_obj, external_dataset_agent_id=datasetAgent_id, external_dataset_id=extDataset_id)
    log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj))
    log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id))

    # Check that the instance is currently not active
    id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
    log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s active 1 = %s", str(id), str(active))

    self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id)

    dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id)
    log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj))

    # Now the instance process should be active
    id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
    log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s active 2 = %s", str(id), str(active))

    # Start a resource agent client to talk with the instrument agent.
    self._dsa_client = ResourceAgentClient(extDataset_id, process=FakeProcess())
    print 'TestExternalDatasetAgentMgmt: got ia client %s' % self._dsa_client
    log.debug("TestExternalDatasetAgentMgmt: got dataset client %s", str(self._dsa_client))

    # cmd = AgentCommand(command='initialize')
    # _ = self._dsa_client.execute_agent(cmd)
    #
    # cmd = AgentCommand(command='go_active')
    # _ = self._dsa_client.execute_agent(cmd)
    #
    # cmd = AgentCommand(command='run')
    # _ = self._dsa_client.execute_agent(cmd)
    #
    # log.info('Send an unconstrained request for data (\'new data\')')
    # cmd = AgentCommand(command='acquire_data')
    # self._dsa_client.execute(cmd)
    #
    # log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
    # cmd = AgentCommand(command='acquire_data')
    # self._dsa_client.execute(cmd)
    #
    # cmd = AgentCommand(command='reset')
    # _ = self._dsa_client.execute_agent(cmd)
    # cmd = AgentCommand(command='get_current_state')
    # retval = self._dsa_client.execute_agent(cmd)
    # state = retval.result

    # TODO: Think about what we really should be testing at this point
    # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
    # TODO: Do we also need to show data retrieval?

    # Walk the agent through its full state machine, verifying the state after
    # each command (behavior identical to the original step-by-step sequence).
    def assert_state(expected):
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        self.assertEqual(retval.result, expected)

    assert_state(InstrumentAgentState.UNINITIALIZED)
    transitions = [
        ('initialize', InstrumentAgentState.INACTIVE),
        ('go_active', InstrumentAgentState.IDLE),
        ('run', InstrumentAgentState.OBSERVATORY),
        ('pause', InstrumentAgentState.STOPPED),
        ('resume', InstrumentAgentState.OBSERVATORY),
        ('clear', InstrumentAgentState.IDLE),
        ('run', InstrumentAgentState.OBSERVATORY),
        ('pause', InstrumentAgentState.STOPPED),
        ('clear', InstrumentAgentState.IDLE),
        ('run', InstrumentAgentState.OBSERVATORY),
        ('reset', InstrumentAgentState.UNINITIALIZED),
    ]
    for command, expected_state in transitions:
        self._dsa_client.execute_agent(AgentCommand(command=command))
        assert_state(expected_state)

    #-------------------------------
    # Deactivate ExternalDatasetAgentInstance
    #-------------------------------
    self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)
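# If data retrieval also needs demonstrating (per the TODO in
# test_activateDatasetAgent), the commented-out block in that test suggests
# the shape it would take. An uncommented version follows -- a sketch only,
# assuming 'acquire_data' goes through execute() while lifecycle commands go
# through execute_agent(), as the commented code indicates:
def _exercise_acquire_data(self):
    # Bring the agent up to OBSERVATORY before requesting data.
    self._dsa_client.execute_agent(AgentCommand(command='initialize'))
    self._dsa_client.execute_agent(AgentCommand(command='go_active'))
    self._dsa_client.execute_agent(AgentCommand(command='run'))

    log.info('Send an unconstrained request for data (\'new data\')')
    self._dsa_client.execute(AgentCommand(command='acquire_data'))

    log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
    self._dsa_client.execute(AgentCommand(command='acquire_data'))

    self._dsa_client.execute_agent(AgentCommand(command='reset'))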
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS'
    # Build the test resources for the dataset
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()

    eda = ExternalDatasetAgent()
    eda_id = dams_cli.create_external_dataset_agent(eda)
    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects
    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.name = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.name = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'slocum_test_dataset'
    dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
    dset.dataset_description.parameters['dataset_path'] = 'test_data/ru05-2012-021-0-0-sbd.dat'
    dset.dataset_description.parameters['temporal_dimension'] = None
    dset.dataset_description.parameters['zonal_dimension'] = None
    dset.dataset_description.parameters['meridional_dimension'] = None
    dset.dataset_description.parameters['vertical_dimension'] = None
    dset.dataset_description.parameters['variables'] = [
        'c_wpt_y_lmc', 'sci_water_cond', 'm_y_lmc', 'u_hd_fin_ap_inflection_holdoff',
        'sci_m_present_time', 'm_leakdetect_voltage_forward', 'sci_bb3slo_b660_scaled',
        'c_science_send_all', 'm_gps_status', 'm_water_vx', 'm_water_vy', 'c_heading',
        'sci_fl3slo_chlor_units', 'u_hd_fin_ap_gain', 'm_vacuum', 'u_min_water_depth',
        'm_gps_lat', 'm_veh_temp', 'f_fin_offset', 'u_hd_fin_ap_hardover_holdoff',
        'c_alt_time', 'm_present_time', 'm_heading', 'sci_bb3slo_b532_scaled',
        'sci_fl3slo_cdom_units', 'm_fin', 'x_cycle_overrun_in_ms', 'sci_water_pressure',
        'u_hd_fin_ap_igain', 'sci_fl3slo_phyco_units', 'm_battpos', 'sci_bb3slo_b470_scaled',
        'm_lat', 'm_gps_lon', 'sci_ctd41cp_timestamp', 'm_pressure', 'c_wpt_x_lmc',
        'c_ballast_pumped', 'x_lmc_xy_source', 'm_lon', 'm_avg_speed', 'sci_water_temp',
        'u_pitch_ap_gain', 'm_roll', 'm_tot_num_inflections', 'm_x_lmc', 'u_pitch_ap_deadband',
        'm_final_water_vy', 'm_final_water_vx', 'm_water_depth', 'm_leakdetect_voltage',
        'u_pitch_max_delta_battpos', 'm_coulomb_amphr', 'm_pitch',
    ]

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='slocum_model')
    dsrc_model.model = 'SLOCUM'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Generate the data product and associate it to the ExternalDataset
    dprod = DataProduct(name='slocum_parsed_product', description='parsed slocum product')
    dproduct_id = dpms_cli.create_data_product(data_product=dprod)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

    #CBM: Use CF standard_names
    # Each variable gets its own taxonomy set; the nick names are exactly the
    # entries of the 'variables' list above, so build them in a loop rather
    # than with one add_taxonomy_set call per variable.
    ttool = TaxyTool()
    for var in dset.dataset_description.parameters['variables']:
        ttool.add_taxonomy_set(var)
    #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

    # Create the logger for receiving publications
    self.create_stream_and_logger(name='slocum', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'external_dataset_res': dset,
        'taxonomy': ttool.dump(),
        'data_producer_id': dproducer_id,  #CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 20,
    }
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS'
    # Build the test resources for the dataset
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()

    eda = ExternalDatasetAgent()
    eda_id = dams_cli.create_external_dataset_agent(eda)
    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects
    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.name = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(protocol_type='DAP', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.name = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'usgs_test_dataset'
    dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
    # The usgs.nc test dataset is a download of the R1 dataset found here:
    # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
    dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
    dset.dataset_description.parameters['temporal_dimension'] = 'time'
    dset.dataset_description.parameters['zonal_dimension'] = 'lon'
    dset.dataset_description.parameters['meridional_dimension'] = 'lat'
    dset.dataset_description.parameters['vertical_dimension'] = 'z'
    dset.dataset_description.parameters['variables'] = [
        'water_temperature',
        'streamflow',
        'water_temperature_bottom',
        'water_temperature_middle',
        'specific_conductance',
        'data_qualifier',
    ]

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='dap_model')
    dsrc_model.model = 'DAP'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Generate the data product and associate it to the ExternalDataset
    dprod = DataProduct(name='usgs_parsed_product', description='parsed usgs product')
    dproduct_id = dpms_cli.create_data_product(data_product=dprod)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

    #CBM: Use CF standard_names
    ttool = TaxyTool()
    ttool.add_taxonomy_set('time', 'time')
    ttool.add_taxonomy_set('lon', 'longitude')
    ttool.add_taxonomy_set('lat', 'latitude')
    ttool.add_taxonomy_set('z', 'water depth')
    ttool.add_taxonomy_set('water_temperature', 'average water temperature')
    ttool.add_taxonomy_set('water_temperature_bottom', 'water temperature at bottom of water column')
    ttool.add_taxonomy_set('water_temperature_middle', 'water temperature at middle of water column')
    ttool.add_taxonomy_set('streamflow', 'flow velocity of stream')
    ttool.add_taxonomy_set('specific_conductance', 'specific conductance of water')
    ttool.add_taxonomy_set('data_qualifier', 'data qualifier flag')
    ttool.add_taxonomy_set('coords', 'This group contains coordinate parameters')
    ttool.add_taxonomy_set('data', 'This group contains data parameters')

    # Create the logger for receiving publications
    self.create_stream_and_logger(name='usgs', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'taxonomy': ttool.dump(),
        'data_producer_id': dproducer_id,  #CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 4,
    }
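# Downstream, the data handler receives the taxonomy only in its dumped form
# ('taxonomy': ttool.dump() above). Judging by test_yamlize earlier in this
# section, the dump()/load() round trip preserves handles and name sets, so a
# consumer-side sanity check might look like this:
ttool = TaxyTool()
ttool.add_taxonomy_set('lon', 'longitude')
restored = TaxyTool.load(ttool.dump())
# Handles are assigned in insertion order; handle 0 carries both names.
assert restored.get_names_by_handle(0) == {'lon', 'longitude'}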