def setUp(self):
    self._start_container()
    self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml')

    self.dams_cli = DataAcquisitionManagementServiceClient()
    self.dpms_cli = DataProductManagementServiceClient()

    eda = ExternalDatasetAgent()
    self.eda_id = self.dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance()
    self.eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=self.eda_id)

    self._setup_ncom()

    proc_name = self.ncom_ds_id + '_worker'
    config = {}
    config['process'] = {'name': proc_name, 'type': 'agent'}
    config['process']['eoa'] = {'dataset_id': self.ncom_ds_id}

    pid = self.container.spawn_process(name=proc_name,
                                       module='eoi.agent.external_observatory_agent',
                                       cls='ExternalObservatoryAgent',
                                       config=config)
    queue_id = "%s.%s" % (self.container.id, pid)
    log.debug("Spawned worker process ==> proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

    self._agent_cli = ResourceAgentClient(self.ncom_ds_id, name=pid, process=FakeProcess())
    log.debug("Got a ResourceAgentClient: res_id=%s" % self._agent_cli.resource_id)
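# FakeProcess is referenced in setUp() but not defined in this snippet. Below is
# a minimal sketch of the stub commonly used to give ResourceAgentClient a
# calling context in pyon-based tests; it is an assumption about the harness,
# not code from the original file.
from pyon.util.context import LocalContextMixin  # assumed import path


class FakeProcess(LocalContextMixin):
    """Minimal stand-in process so ResourceAgentClient has a calling context."""
    name = ''
    id = ''
    process_type = ''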
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS
    # Build the test resources for the dataset
    dms_cli = DatasetManagementServiceClient()
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()
    pubsub_cli = PubsubManagementServiceClient()

    eda = ExternalDatasetAgent(handler_module=self.DVR_CONFIG['dvr_mod'],
                               handler_class=self.DVR_CONFIG['dvr_cls'])
    eda_id = dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects

    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.individual_names_given = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.individual_names_given = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'slocum_test_dataset'
    dset = ExternalDataset(name=ds_name,
                           dataset_description=DatasetDescription(),
                           update_description=UpdateDescription(),
                           contact=ContactInformation())

    dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
    dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
    dset.dataset_description.parameters['date_pattern'] = '%Y %j'
    dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
    dset.dataset_description.parameters['temporal_dimension'] = None
    dset.dataset_description.parameters['zonal_dimension'] = None
    dset.dataset_description.parameters['meridional_dimension'] = None
    dset.dataset_description.parameters['vertical_dimension'] = None
    dset.dataset_description.parameters['variables'] = [
        'c_wpt_y_lmc', 'sci_water_cond', 'm_y_lmc', 'u_hd_fin_ap_inflection_holdoff',
        'sci_m_present_time', 'm_leakdetect_voltage_forward', 'sci_bb3slo_b660_scaled',
        'c_science_send_all', 'm_gps_status', 'm_water_vx', 'm_water_vy', 'c_heading',
        'sci_fl3slo_chlor_units', 'u_hd_fin_ap_gain', 'm_vacuum', 'u_min_water_depth',
        'm_gps_lat', 'm_veh_temp', 'f_fin_offset', 'u_hd_fin_ap_hardover_holdoff',
        'c_alt_time', 'm_present_time', 'm_heading', 'sci_bb3slo_b532_scaled',
        'sci_fl3slo_cdom_units', 'm_fin', 'x_cycle_overrun_in_ms', 'sci_water_pressure',
        'u_hd_fin_ap_igain', 'sci_fl3slo_phyco_units', 'm_battpos', 'sci_bb3slo_b470_scaled',
        'm_lat', 'm_gps_lon', 'sci_ctd41cp_timestamp', 'm_pressure', 'c_wpt_x_lmc',
        'c_ballast_pumped', 'x_lmc_xy_source', 'm_lon', 'm_avg_speed', 'sci_water_temp',
        'u_pitch_ap_gain', 'm_roll', 'm_tot_num_inflections', 'm_x_lmc', 'u_pitch_ap_deadband',
        'm_final_water_vy', 'm_final_water_vx', 'm_water_depth', 'm_leakdetect_voltage',
        'u_pitch_max_delta_battpos', 'm_coulomb_amphr', 'm_pitch',
    ]

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='slocum_model')
    # dsrc_model.model = 'SLOCUM'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Create temp streamdef so the data product can create the stream
    pc_list = []
    for pc_k, pc in self._create_parameter_dictionary().iteritems():
        pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

    pdict_id = dms_cli.create_parameter_dictionary('slocum_param_dict', pc_list)
    streamdef_id = pubsub_cli.create_stream_definition(name="slocum_stream_def",
                                                       description="stream def for slocum testing",
                                                       parameter_dictionary_id=pdict_id)

    # dpms_cli.create_data_product()

    # Generate the data product and associate it to the ExternalDataset
    tdom, sdom = time_series_domain()
    tdom, sdom = tdom.dump(), sdom.dump()
    dprod = IonObject(RT.DataProduct,
                      name='slocum_parsed_product',
                      description='parsed slocum product',
                      temporal_domain=tdom,
                      spatial_domain=sdom)
    dproduct_id = dpms_cli.create_data_product(data_product=dprod, stream_definition_id=streamdef_id)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id,
                                              'ExternalDataProvider': ext_dprov_id,
                                              'DataSource': ext_dsrc_id,
                                              'DataSourceModel': ext_dsrc_model_id,
                                              'DataProducer': dproducer_id,
                                              'DataProduct': dproduct_id,
                                              'Stream': stream_id}))

    # Create the logger for receiving publications
    _, stream_route, _ = self.create_stream_and_logger(name='slocum', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'stream_route': stream_route,
        'stream_def': streamdef_id,
        'external_dataset_res': dset,
        'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 20,
    }
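# _create_parameter_dictionary() is called above but not shown in this snippet.
# The loop treats its return value like a coverage-model ParameterDictionary,
# whose iteritems() yields (name, (ordinal, ParameterContext)) pairs -- hence
# pc[1].dump(). A hypothetical minimal version follows; the parameter names come
# from the 'variables' list above, but the dtypes and units are illustrative
# assumptions, not from the original file.
def _create_parameter_dictionary(self):
    pdict = ParameterDictionary()

    t_ctxt = ParameterContext('sci_m_present_time',
                              param_type=QuantityType(value_encoding=numpy.dtype('float64')))
    t_ctxt.axis = AxisTypeEnum.TIME
    t_ctxt.uom = 'seconds since 1970-01-01'  # assumed unit
    pdict.add_context(t_ctxt)

    c_ctxt = ParameterContext('sci_water_cond',
                              param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    c_ctxt.uom = 'S m-1'  # assumed unit
    pdict.add_context(c_ctxt)

    return pdict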
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS
    # Build the test resources for the dataset
    dms_cli = DatasetManagementServiceClient()
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()
    pubsub_cli = PubsubManagementServiceClient()

    eda = ExternalDatasetAgent(name='example eda',
                               handler_module=self.DVR_CONFIG['dvr_mod'],
                               handler_class=self.DVR_CONFIG['dvr_cls'])
    eda_id = dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance(name='example eda instance')
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects

    # Create DataProvider
    dprov = ExternalDataProvider(name='example data provider', institution=Institution(), contact=ContactInformation())
    dprov.contact.individual_names_given = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(name='example datasource', protocol_type='DAP', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.individual_names_given = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'usgs_test_dataset'
    dset = ExternalDataset(name=ds_name,
                           dataset_description=DatasetDescription(),
                           update_description=UpdateDescription(),
                           contact=ContactInformation())

    # The usgs.nc test dataset is a download of the R1 dataset found here:
    # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
    dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
    dset.dataset_description.parameters['temporal_dimension'] = 'time'
    dset.dataset_description.parameters['zonal_dimension'] = 'lon'
    dset.dataset_description.parameters['meridional_dimension'] = 'lat'
    dset.dataset_description.parameters['vertical_dimension'] = 'z'
    dset.dataset_description.parameters['variables'] = [
        'water_temperature',
        'streamflow',
        'water_temperature_bottom',
        'water_temperature_middle',
        'specific_conductance',
        'data_qualifier',
    ]

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='dap_model')
    # dsrc_model.model = 'DAP'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Create temp streamdef so the data product can create the stream
    pc_list = []

    # Get the 'time' parameter context
    pc_list.append(dms_cli.read_parameter_context_by_name('time', id_only=True))
    for pc_k, pc in self._create_parameter_dictionary().iteritems():
        pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

    pdict_id = dms_cli.create_parameter_dictionary('netcdf_param_dict', pc_list)
    streamdef_id = pubsub_cli.create_stream_definition(name="netcdf", description="netcdf", parameter_dictionary_id=pdict_id)

    tdom, sdom = time_series_domain()
    tdom, sdom = tdom.dump(), sdom.dump()
    dprod = IonObject(RT.DataProduct,
                      name='usgs_parsed_product',
                      description='parsed usgs product',
                      temporal_domain=tdom,
                      spatial_domain=sdom)

    # Generate the data product and associate it to the ExternalDataset
    dproduct_id = dpms_cli.create_data_product(data_product=dprod, stream_definition_id=streamdef_id)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id,
                                              'ExternalDataProvider': ext_dprov_id,
                                              'DataSource': ext_dsrc_id,
                                              'DataSourceModel': ext_dsrc_model_id,
                                              'DataProducer': dproducer_id,
                                              'DataProduct': dproduct_id,
                                              'Stream': stream_id}))

    # Create the logger for receiving publications
    _, stream_route, _ = self.create_stream_and_logger(name='usgs', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'stream_route': stream_route,
        'stream_def': streamdef_id,
        'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 1,
    }
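# For context: the stream_id/stream_route pair stored in dh_cfg above is what a
# data handler needs in order to publish granules on the stream the test just
# wired up. A hedged sketch of that hookup, assuming pyon's
# StandaloneStreamPublisher (import path and positional signature assumed):
from pyon.ion.stream import StandaloneStreamPublisher  # assumed import path


def _make_publisher(dh_cfg):
    # Publisher bound to the stream and route created in _setup_resources()
    return StandaloneStreamPublisher(dh_cfg['stream_id'], dh_cfg['stream_route'])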
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS
    # Build the test resources for the dataset
    dms_cli = DatasetManagementServiceClient()
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()
    pubsub_cli = PubsubManagementServiceClient()

    eda = ExternalDatasetAgent(name='example data agent',
                               handler_module=self.DVR_CONFIG['dvr_mod'],
                               handler_class=self.DVR_CONFIG['dvr_cls'])
    eda_id = dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance(name='example dataset agent instance')
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects

    # Create DataProvider
    dprov = ExternalDataProvider(name='example data provider', institution=Institution(), contact=ContactInformation())
    dprov.contact.individual_names_given = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(name='example datasource', protocol_type='FILE', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.individual_names_given = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'ruv_test_dataset'
    dset = ExternalDataset(name=ds_name,
                           dataset_description=DatasetDescription(),
                           update_description=UpdateDescription(),
                           contact=ContactInformation())

    dset.dataset_description.parameters['base_url'] = 'test_data/ruv/'
    dset.dataset_description.parameters['list_pattern'] = 'RDLi_SEAB_2011_08_24_1600.ruv'
    dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H %M'
    dset.dataset_description.parameters['date_extraction_pattern'] = 'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
    dset.dataset_description.parameters['temporal_dimension'] = None
    dset.dataset_description.parameters['zonal_dimension'] = None
    dset.dataset_description.parameters['meridional_dimension'] = None
    dset.dataset_description.parameters['vertical_dimension'] = None
    dset.dataset_description.parameters['variables'] = []

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='ruv_model')
    # dsrc_model.model = 'RUV'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)

    pdict = ParameterDictionary()
    t_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
    t_ctxt.axis = AxisTypeEnum.TIME
    t_ctxt.uom = 'seconds since 01-01-1970'
    pdict.add_context(t_ctxt)

    # Create temp streamdef so the data product can create the stream
    pc_list = []
    for pc_k, pc in pdict.iteritems():
        pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

    pdict_id = dms_cli.create_parameter_dictionary('ruv_param_dict', pc_list)
    streamdef_id = pubsub_cli.create_stream_definition(name="ruv",
                                                       description="stream def for ruv testing",
                                                       parameter_dictionary_id=pdict_id)

    dprod = IonObject(RT.DataProduct, name='ruv_parsed_product', description='parsed ruv product')

    # Generate the data product and associate it to the ExternalDataset
    dproduct_id = dpms_cli.create_data_product(data_product=dprod, stream_definition_id=streamdef_id)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id,
                                              'ExternalDataProvider': ext_dprov_id,
                                              'DataSource': ext_dsrc_id,
                                              'DataSourceModel': ext_dsrc_model_id,
                                              'DataProducer': dproducer_id,
                                              'DataProduct': dproduct_id,
                                              'Stream': stream_id}))

    # CBM: Eventually, probably want to group this crap somehow - not sure how yet...
    # Create the logger for receiving publications
    _, stream_route, _ = self.create_stream_and_logger(name='ruv', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'stream_route': stream_route,
        'external_dataset_res': dset,
        'param_dictionary': pdict.dump(),
        'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 20,
    }
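# Handler-side counterpart (an assumption, for illustration): the
# 'param_dictionary' entry above is a dump() of the ParameterDictionary built in
# _setup_resources(), and the coverage model can rehydrate it before granules
# are built.
def _load_param_dict(dh_cfg):
    # ParameterDictionary.load() reconstructs the object from its dump()
    return ParameterDictionary.load(dh_cfg['param_dictionary'])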
def _setup_hfr(self):
    # TODO: some or all of this (or some variation) should move to DAMS
    # Create and register the necessary resources/objects
    eda = ExternalDatasetAgent()
    eda_id = self.dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    # dprov.institution.name = "HFR UCSD"

    # Create DataSource
    dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
    dsrc.connection_params["base_data_url"] = "http://hfrnet.ucsd.edu:8080/thredds/dodsC/"

    # Create ExternalDataset
    dset = ExternalDataset(name="UCSD HFR",
                           dataset_description=DatasetDescription(),
                           update_description=UpdateDescription(),
                           contact=ContactInformation())
    dset.dataset_description.parameters["dataset_path"] = "HFRNet/USEGC/6km/hourly/RTV"
    # dset.dataset_description.parameters["dataset_path"] = "test_data/hfr.nc"
    dset.dataset_description.parameters["temporal_dimension"] = "time"
    dset.dataset_description.parameters["zonal_dimension"] = "lon"
    dset.dataset_description.parameters["meridional_dimension"] = "lat"

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name="dap_model")
    dsrc_model.model = "DAP"
    dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
    dsrc_model.data_handler_class = "DapExternalDataHandler"

    ## Run everything through DAMS
    ds_id = self.hfr_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = self.dams_cli.register_external_data_set(external_dataset_id=ds_id)

    ## Associate everything
    # Convenience method
    # self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

    # Or using each method
    self.dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    self.dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    self.dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    self.dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # self.dams_cli.assign_external_dataset_agent_to_data_model(external_data_agent_id=eda_id, data_source_model_id=ext_dsrc_model_id)
    # self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

    # Generate the data product and associate it to the ExternalDataset
    dprod = DataProduct(name='hfr_product', description='raw hfr product')
    dproduct_id = self.dpms_cli.create_data_product(data_product=dprod)
    self.dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)
def _setup_ncom(self):
    # TODO: some or all of this (or some variation) should move to DAMS
    # Create and register the necessary resources/objects
    eda = ExternalDatasetAgent()
    eda_id = self.dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    # dprov.institution.name = "OOI CGSN"
    dprov.contact.name = "Robert Weller"
    dprov.contact.email = "*****@*****.**"

    # Create DataSource
    dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
    # dsrc.connection_params["base_data_url"] = "http://ooi.whoi.edu/thredds/dodsC/"
    dsrc.connection_params["base_data_url"] = ""
    dsrc.contact.name = "Rich Signell"
    dsrc.contact.email = "*****@*****.**"

    # Create ExternalDataset
    dset = ExternalDataset(name="test",
                           dataset_description=DatasetDescription(),
                           update_description=UpdateDescription(),
                           contact=ContactInformation())
    # dset.dataset_description.parameters["dataset_path"] = "ooi/AS02CPSM_R_M.nc"
    dset.dataset_description.parameters["dataset_path"] = "test_data/ncom.nc"
    dset.dataset_description.parameters["temporal_dimension"] = "time"
    dset.dataset_description.parameters["zonal_dimension"] = "lon"
    dset.dataset_description.parameters["meridional_dimension"] = "lat"

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name="dap_model")
    dsrc_model.model = "DAP"
    dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
    dsrc_model.data_handler_class = "DapExternalDataHandler"

    ## Run everything through DAMS
    ds_id = self.ncom_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = self.dams_cli.register_external_data_set(external_dataset_id=ds_id)

    ## Associate everything
    # Convenience method
    # self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

    # Or using each method
    self.dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    self.dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    self.dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    self.dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # self.dams_cli.assign_external_dataset_agent_to_data_model(external_data_agent_id=eda_id, data_source_model_id=ext_dsrc_model_id)
    # self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

    # Generate the data product and associate it to the ExternalDataset
    dprod = DataProduct(name='ncom_product', description='raw ncom product')
    dproduct_id = self.dpms_cli.create_data_product(data_product=dprod)
    self.dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)
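# With _setup_ncom() wired into setUp() above, a test would typically drive the
# spawned worker through self._agent_cli. A hedged sketch follows; the command
# name is a hypothetical placeholder, not taken from the original file.
from interface.objects import AgentCommand  # assumed import path


def _example_agent_call(self):
    cmd = AgentCommand(command='get_status')  # hypothetical command name
    retval = self._agent_cli.execute(cmd)
    log.debug('Agent replied: %s' % retval)
    return retval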
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS
    # Build the test resources for the dataset
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()

    eda = ExternalDatasetAgent()
    eda_id = dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects

    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.name = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.name = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'slocum_test_dataset'
    dset = ExternalDataset(name=ds_name,
                           dataset_description=DatasetDescription(),
                           update_description=UpdateDescription(),
                           contact=ContactInformation())

    dset.dataset_description.parameters['dataset_path'] = 'test_data/ru05-2012-021-0-0-sbd.dat'
    dset.dataset_description.parameters['temporal_dimension'] = None
    dset.dataset_description.parameters['zonal_dimension'] = None
    dset.dataset_description.parameters['meridional_dimension'] = None
    dset.dataset_description.parameters['vertical_dimension'] = None
    dset.dataset_description.parameters['variables'] = [
        'c_wpt_y_lmc', 'sci_water_cond', 'm_y_lmc', 'u_hd_fin_ap_inflection_holdoff',
        'sci_m_present_time', 'm_leakdetect_voltage_forward', 'sci_bb3slo_b660_scaled',
        'c_science_send_all', 'm_gps_status', 'm_water_vx', 'm_water_vy', 'c_heading',
        'sci_fl3slo_chlor_units', 'u_hd_fin_ap_gain', 'm_vacuum', 'u_min_water_depth',
        'm_gps_lat', 'm_veh_temp', 'f_fin_offset', 'u_hd_fin_ap_hardover_holdoff',
        'c_alt_time', 'm_present_time', 'm_heading', 'sci_bb3slo_b532_scaled',
        'sci_fl3slo_cdom_units', 'm_fin', 'x_cycle_overrun_in_ms', 'sci_water_pressure',
        'u_hd_fin_ap_igain', 'sci_fl3slo_phyco_units', 'm_battpos', 'sci_bb3slo_b470_scaled',
        'm_lat', 'm_gps_lon', 'sci_ctd41cp_timestamp', 'm_pressure', 'c_wpt_x_lmc',
        'c_ballast_pumped', 'x_lmc_xy_source', 'm_lon', 'm_avg_speed', 'sci_water_temp',
        'u_pitch_ap_gain', 'm_roll', 'm_tot_num_inflections', 'm_x_lmc', 'u_pitch_ap_deadband',
        'm_final_water_vy', 'm_final_water_vx', 'm_water_depth', 'm_leakdetect_voltage',
        'u_pitch_max_delta_battpos', 'm_coulomb_amphr', 'm_pitch',
    ]

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='slocum_model')
    dsrc_model.model = 'SLOCUM'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Generate the data product and associate it to the ExternalDataset
    dprod = DataProduct(name='slocum_parsed_product', description='parsed slocum product')
    dproduct_id = dpms_cli.create_data_product(data_product=dprod)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id,
                                              'ExternalDataProvider': ext_dprov_id,
                                              'DataSource': ext_dsrc_id,
                                              'DataSourceModel': ext_dsrc_model_id,
                                              'DataProducer': dproducer_id,
                                              'DataProduct': dproduct_id,
                                              'Stream': stream_id}))

    # CBM: Use CF standard_names
    # The taxonomy mirrors the 'variables' list defined above, one handle per
    # variable, in the same order.
    ttool = TaxyTool()
    for handle in dset.dataset_description.parameters['variables']:
        ttool.add_taxonomy_set(handle)

    # CBM: Eventually, probably want to group this crap somehow - not sure how yet...

    # Create the logger for receiving publications
    self.create_stream_and_logger(name='slocum', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'external_dataset_res': dset,
        'taxonomy': ttool.dump(),
        'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 20,
    }
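# The handler side (an assumption, for illustration) can rebuild the taxonomy
# from the dump stored in dh_cfg above:
def _load_taxonomy(dh_cfg):
    # TaxyTool.load() is the assumed inverse of TaxyTool.dump()
    return TaxyTool.load(dh_cfg['taxonomy'])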
def _setup_resources(self):
    # TODO: some or all of this (or some variation) should move to DAMS
    # Build the test resources for the dataset
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()

    eda = ExternalDatasetAgent()
    eda_id = dams_cli.create_external_dataset_agent(eda)

    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects

    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.name = 'Christopher Mueller'
    dprov.contact.email = '*****@*****.**'

    # Create DataSource
    dsrc = DataSource(protocol_type='DAP', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.name = 'Tim Giguere'
    dsrc.contact.email = '*****@*****.**'

    # Create ExternalDataset
    ds_name = 'usgs_test_dataset'
    dset = ExternalDataset(name=ds_name,
                           dataset_description=DatasetDescription(),
                           update_description=UpdateDescription(),
                           contact=ContactInformation())

    # The usgs.nc test dataset is a download of the R1 dataset found here:
    # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
    dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
    dset.dataset_description.parameters['temporal_dimension'] = 'time'
    dset.dataset_description.parameters['zonal_dimension'] = 'lon'
    dset.dataset_description.parameters['meridional_dimension'] = 'lat'
    dset.dataset_description.parameters['vertical_dimension'] = 'z'
    dset.dataset_description.parameters['variables'] = [
        'water_temperature',
        'streamflow',
        'water_temperature_bottom',
        'water_temperature_middle',
        'specific_conductance',
        'data_qualifier',
    ]

    # Create DataSourceModel
    dsrc_model = DataSourceModel(name='dap_model')
    dsrc_model.model = 'DAP'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Generate the data product and associate it to the ExternalDataset
    dprod = DataProduct(name='usgs_parsed_product', description='parsed usgs product')
    dproduct_id = dpms_cli.create_data_product(data_product=dprod)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]

    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id,
                                              'ExternalDataProvider': ext_dprov_id,
                                              'DataSource': ext_dsrc_id,
                                              'DataSourceModel': ext_dsrc_model_id,
                                              'DataProducer': dproducer_id,
                                              'DataProduct': dproduct_id,
                                              'Stream': stream_id}))

    # CBM: Use CF standard_names
    ttool = TaxyTool()
    ttool.add_taxonomy_set('time', 'time')
    ttool.add_taxonomy_set('lon', 'longitude')
    ttool.add_taxonomy_set('lat', 'latitude')
    ttool.add_taxonomy_set('z', 'water depth')
    ttool.add_taxonomy_set('water_temperature', 'average water temperature')
    ttool.add_taxonomy_set('water_temperature_bottom', 'water temperature at bottom of water column')
    ttool.add_taxonomy_set('water_temperature_middle', 'water temperature at middle of water column')
    ttool.add_taxonomy_set('streamflow', 'flow velocity of stream')
    ttool.add_taxonomy_set('specific_conductance', 'specific conductance of water')
    ttool.add_taxonomy_set('data_qualifier', 'data qualifier flag')
    ttool.add_taxonomy_set('coords', 'This group contains coordinate parameters')
    ttool.add_taxonomy_set('data', 'This group contains data parameters')

    # Create the logger for receiving publications
    self.create_stream_and_logger(name='usgs', stream_id=stream_id)

    self.EDA_RESOURCE_ID = ds_id
    self.EDA_NAME = ds_name
    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'taxonomy': ttool.dump(),
        'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
        'max_records': 4,
    }
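# Illustrative only: granules published against this taxonomy are typically
# assembled with the TaxyTool-era RecordDictionaryTool/build_granule API. The
# import paths, signatures, and sample values below are assumptions tied to
# that API version, not code from the original file.
from pyon.ion.granule.record_dictionary import RecordDictionaryTool  # assumed path
from pyon.ion.granule.granule import build_granule  # assumed path


def _example_granule(ttool, dproducer_id):
    rdt = RecordDictionaryTool(taxonomy=ttool)
    rdt['time'] = numpy.arange(4)                      # matches max_records=4 above
    rdt['water_temperature'] = numpy.random.random(4)  # illustrative values
    return build_granule(data_producer_id=dproducer_id, taxonomy=ttool, record_dictionary=rdt)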