def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.pubsubclient = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.imsclient = InstrumentManagementServiceClient()
    self.dataproductclient = DataProductManagementServiceClient()
    self.dataprocessclient = DataProcessManagementServiceClient()
    self.datasetclient = DatasetManagementServiceClient()
    self.dataset_management = self.datasetclient
    self.process_dispatcher = ProcessDispatcherServiceClient()

def get_parameter_dictionary(cls, parameter_dictionary_id=''):
    '''
    Preferred client-side class method for constructing a parameter
    dictionary from a service call.
    '''
    dms_cli = DatasetManagementServiceClient()
    pd = dms_cli.read_parameter_dictionary(parameter_dictionary_id)
    pcs = dms_cli.read_parameter_contexts(
        parameter_dictionary_id=parameter_dictionary_id, id_only=False)
    pdict = cls._merge_contexts(
        [ParameterContext.load(i.parameter_context) for i in pcs],
        pd.temporal_context)
    pdict._identifier = parameter_dictionary_id
    return pdict

def publish_loop(self):
    sine_ampl = 2.0  # Amplitude in both directions
    samples = 60

    startTime = time.time()
    count = samples  # something other than zero

    self.dataset_management = DatasetManagementServiceClient(
        node=self.container.node)

    while not self.finished.is_set():
        count = time.time() - startTime
        sine_curr_deg = (count % samples) * 360 / samples

        c = numpy.array(
            [sine_ampl * math.sin(math.radians(sine_curr_deg))])
        t = numpy.array(
            [sine_ampl * 2 * math.sin(math.radians(sine_curr_deg + 45))])
        p = numpy.array(
            [sine_ampl * 4 * math.sin(math.radians(sine_curr_deg + 60))])
        lat = numpy.array([32.8])
        lon = numpy.array([-119.6])

        # convert time to ntp time. Standard notation in the system
        tvar = numpy.array([ntplib.system_to_ntp_time(time.time())])

        parameter_dictionary = self._create_parameter()
        #parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        rdt = RecordDictionaryTool(param_dictionary=parameter_dictionary)

        h = numpy.array([random.uniform(0.0, 360.0)])

        rdt['time'] = tvar
        rdt['lat'] = lat
        rdt['lon'] = lon
        rdt['temp'] = t
        rdt['conductivity'] = c
        rdt['pressure'] = p

        g = rdt.to_granule(data_producer_id=self.id)

        log.info('SinusoidalCtdPublisher sending 1 record!')
        self.publisher.publish(g, self.stream_id)

        time.sleep(1.0)

def _build_stream_config(self):
    """Create streams and stream configs for each stream named in the driver."""
    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    dataset_management = DatasetManagementServiceClient()

    # Create streams and subscriptions for each stream named in driver.
    self._stream_config = {}

    stream_name = 'parsed'
    param_dict_name = 'ctd_parsed_param_dict'
    pd_id = dataset_management.read_parameter_dictionary_by_name(
        param_dict_name, id_only=True)
    stream_def_id = pubsub_client.create_stream_definition(
        name=stream_name, parameter_dictionary_id=pd_id)
    pd = pubsub_client.read_stream_definition(
        stream_def_id).parameter_dictionary
    stream_id, stream_route = pubsub_client.create_stream(
        name=stream_name,
        exchange_point='science_data',
        stream_definition_id=stream_def_id)
    stream_config = dict(routing_key=stream_route.routing_key,
                         exchange_point=stream_route.exchange_point,
                         stream_id=stream_id,
                         stream_definition_ref=stream_def_id,
                         parameter_dictionary=pd)
    self._stream_config[stream_name] = stream_config

    stream_name = 'raw'
    param_dict_name = 'ctd_raw_param_dict'
    pd_id = dataset_management.read_parameter_dictionary_by_name(
        param_dict_name, id_only=True)
    stream_def_id = pubsub_client.create_stream_definition(
        name=stream_name, parameter_dictionary_id=pd_id)
    pd = pubsub_client.read_stream_definition(
        stream_def_id).parameter_dictionary
    stream_id, stream_route = pubsub_client.create_stream(
        name=stream_name,
        exchange_point='science_data',
        stream_definition_id=stream_def_id)
    stream_config = dict(routing_key=stream_route.routing_key,
                         exchange_point=stream_route.exchange_point,
                         stream_id=stream_id,
                         stream_definition_ref=stream_def_id,
                         parameter_dictionary=pd)
    self._stream_config[stream_name] = stream_config

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    log.debug("TestExternalDatasetAgentMgmt: started services")

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(
        node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(
        node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(
        node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(
        node=self.container.node)

def setUp(self):
    # Start container
    self._start_container()
    log.debug("Start rel from url")
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.DPMS = DataProductManagementServiceClient()
    self.RR = ResourceRegistryServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.DAMS = DataAcquisitionManagementServiceClient()
    self.PSMS = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.PD = ProcessDispatcherServiceClient()
    self.DSMS = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    log.debug("get datastore")
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(
        datastore_name)
    self.stream_def_id = self.PSMS.create_stream_definition(
        name='SBE37_CDM')

    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(
        name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class': 'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.PD.create_process_definition(
        process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    self.addCleanup(self.cleaning_up)

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    self.dataset_management = DatasetManagementServiceClient()

    # setup registry process and patch in CFG
    def init(self):
        super(RegistrationProcess, self).__init__()
        self.CFG = CFG

    RegistrationProcess.__init__ = init
    self.rp = RegistrationProcess()
    self.rp.on_start()

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dataset_management_client = DatasetManagementServiceClient(
        node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(
        node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(
        node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(
        node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(
        node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(
        node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)

    self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(
        node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(
        node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(
        node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(
        node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(
        node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

def setUp(self):
    self.i = 0
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2params.yml')

    self.dataset_management = DatasetManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.resource_registry = self.container.resource_registry
    self.data_retriever = DataRetrieverServiceClient()

    pdicts, _ = self.resource_registry.find_resources(
        restype='ParameterDictionary', id_only=False)
    self.dp_ids = []
    for pdict in pdicts:
        stream_def_id = self.pubsub_management.create_stream_definition(
            pdict.name, parameter_dictionary_id=pdict._id)
        dp_id = self.make_dp(stream_def_id)
        if dp_id:
            self.dp_ids.append(dp_id)

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    log.debug('started services')

    # Now create client to DataProductManagementService
    self.client = DataProductManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.OMS = ObservatoryManagementServiceClient()
    self.org_management_service = OrgManagementServiceClient()
    self.IMS = InstrumentManagementServiceClient()
    self.dpclient = DataProductManagementServiceClient()
    self.pubsubcli = PubsubManagementServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.data_product_management = DataProductManagementServiceClient()

    self._load_stage = 0
    self._resources = {}

def setUp(self):
    super(CtdTransformsIntTest, self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.pubsub = PubsubManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_process_management = DataProcessManagementServiceClient()
    self.dataproduct_management = DataProductManagementServiceClient()
    self.resource_registry = ResourceRegistryServiceClient()

    # This is for the time values inside the packets going into the transform
    self.i = 0

    # Cleanup of queue created by the subscriber
    self.queue_cleanup = []
    self.data_process_cleanup = []

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.imsclient = InstrumentManagementServiceClient(
        node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(
        node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(
        node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient(
        node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(
        node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(
        node=self.container.node)

    self.catch_alert = gevent.queue.Queue()

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(
        node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(
        node=self.container.node)
    self.dmpsclient = DataProductManagementServiceClient(
        node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(
        node=self.container.node)
    self.psmsclient = PubsubManagementServiceClient(
        node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.c = DotDict()
    self.c.resource_registry = self.rrclient

    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    # create missing data process definition
    self.dsmsclient = DataProcessManagementServiceClient(
        node=self.container.node)
    dpd_obj = IonObject(
        RT.DataProcessDefinition,
        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
        description="normally in preload",
        module='ion.processes.data.transforms.logical_transform',
        class_name='logical_transform')
    self.dsmsclient.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(
                RT.DataProcess, None, None, True)[0]:
            self.dsmsclient.deactivate_data_process(proc_id)
            self.dsmsclient.delete_data_process(proc_id)

    self.addCleanup(killAllDataProcesses)

def setUp(self):
    # Start container by calling parent's setUp
    super(TestAssembly, self).setUp()

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(
        node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(
        node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(
        node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(
        node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(
        node=self.container.node)
    self.client.DPRS = DataProcessManagementServiceClient(
        node=self.container.node)
    self.client.RR = ResourceRegistryServiceClient(
        node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
    self.dataset_management = DatasetManagementServiceClient()

    dpd_obj = IonObject(
        RT.DataProcessDefinition,
        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
        description="normally in preload",
        module='ion.processes.data.transforms.logical_transform',
        class_name='logical_transform')
    self.client.DPRS.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.client.RR.find_resources(
                RT.DataProcess, None, None, True)[0]:
            self.client.DPRS.deactivate_data_process(proc_id)
            self.client.DPRS.delete_data_process(proc_id)

    self.addCleanup(killAllDataProcesses)

def load_data_product(self):
    dset_i = 0
    dataset_management = DatasetManagementServiceClient()
    pubsub_management = PubsubManagementServiceClient()
    data_product_management = DataProductManagementServiceClient()
    resource_registry = self.container.instance.resource_registry

    tdom, sdom = time_series_domain()
    tdom = tdom.dump()
    sdom = sdom.dump()
    dp_obj = DataProduct(name='instrument_data_product_%i' % dset_i,
                         description='ctd stream test',
                         processing_level_code='Parsed_Canonical',
                         temporal_domain=tdom,
                         spatial_domain=sdom)
    pdict_id = dataset_management.read_parameter_dictionary_by_name(
        'ctd_parsed_param_dict', id_only=True)
    stream_def_id = pubsub_management.create_stream_definition(
        name='parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(pubsub_management.delete_stream_definition,
                    stream_def_id)
    data_product_id = data_product_management.create_data_product(
        data_product=dp_obj, stream_definition_id=stream_def_id)
    self.addCleanup(data_product_management.delete_data_product,
                    data_product_id)
    data_product_management.activate_data_product_persistence(
        data_product_id)
    self.addCleanup(
        data_product_management.suspend_data_product_persistence,
        data_product_id)

    stream_ids, assocs = resource_registry.find_objects(
        subject=data_product_id, predicate='hasStream', id_only=True)
    stream_id = stream_ids[0]
    route = pubsub_management.read_stream_route(stream_id)

    dataset_ids, assocs = resource_registry.find_objects(
        subject=data_product_id, predicate='hasDataset', id_only=True)
    dataset_id = dataset_ids[0]

    return data_product_id, stream_id, route, stream_def_id, dataset_id

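# A minimal consumer sketch for the fixture above (hypothetical: the test
# name and published values are assumptions, not from the original suite).
# It wires a publisher onto the product's stream, mirroring the
# StandaloneStreamPublisher pattern used elsewhere in this listing.
def test_data_product_fixture(self):
    data_product_id, stream_id, route, stream_def_id, dataset_id = \
        self.load_data_product()
    publisher = StandaloneStreamPublisher(stream_id, route)
    # Build one granule against the fixture's stream definition and send it.
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = numpy.array([0])
    publisher.publish(rdt.to_granule())
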
def setUp(self):  # Love the non pep-8 convention
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.resource_registry = ResourceRegistryServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.ingestion_management = IngestionManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.pids = []
    self.event = Event()
    self.exchange_space_name = 'test_granules'
    self.exchange_point_name = 'science_data'
    self.i = 0

    self.purge_queues()
    self.queue_buffer = []
    self.streams = []
    self.addCleanup(self.stop_all_ingestion)

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.org_management_service = OrgManagementServiceClient(
        node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(
        node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(
        node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.event_publisher = EventPublisher()

def setUp(self):
    self.username = CFG.get_safe('eoi.geoserver.user_name', 'admin')
    self.PASSWORD = CFG.get_safe('eoi.geoserver.password', 'geoserver')
    self.gs_host = CFG.get_safe('eoi.geoserver.server',
                                'http://localhost:8080')
    self.gs_rest_url = ''.join([self.gs_host, '/geoserver/rest'])
    self.gs_ows_url = ''.join([self.gs_host, '/geoserver/ows'])
    IMPORTER_SERVICE_SERVER = CFG.get_safe('eoi.importer_service.server',
                                           'http://localhost')
    IMPORTER_SERVICE_PORT = str(
        CFG.get_safe('eoi.importer_service.port', 8844))
    self.importer_service_url = ''.join(
        [IMPORTER_SERVICE_SERVER, ':', IMPORTER_SERVICE_PORT])

    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dataset_management = DatasetManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.resource_registry = self.container.resource_registry

def setUp(self):
    # Start container
    self._start_container()

    # Suppress a PyCharm inspector error if all unittest.skip references
    # are commented out.
    unittest

    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.IDS = IdentityManagementServiceClient(node=self.container.node)
    self.PSC = PubsubManagementServiceClient(node=self.container.node)
    self.DP = DataProductManagementServiceClient(node=self.container.node)
    self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.DSC = DatasetManagementServiceClient(node=self.container.node)
    self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.IDS = IdentityManagementServiceClient(node=self.container.node)
    self.PSC = PubsubManagementServiceClient(node=self.container.node)
    self.DP = DataProductManagementServiceClient(node=self.container.node)
    self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.DSC = DatasetManagementServiceClient(node=self.container.node)
    self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)

    log.debug('started services')

def _build_stream_config(self):
    """Create streams and stream configs for each stream named in the driver."""
    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    dataset_management = DatasetManagementServiceClient()

    # Create streams and subscriptions for each stream named in driver.
    self.stream_config = {}
    streams = {
        'parsed': 'ctd_parsed_param_dict',
        'raw': 'ctd_raw_param_dict'
    }

    for (stream_name, param_dict_name) in streams.iteritems():
        # Look up the parameter dictionary named for this stream.
        pd_id = dataset_management.read_parameter_dictionary_by_name(
            param_dict_name, id_only=True)
        if not pd_id:
            log.error("No pd_id found for param_dict '%s'" % param_dict_name)

        stream_def_id = pubsub_client.create_stream_definition(
            name=stream_name, parameter_dictionary_id=pd_id)
        pd = None
        stream_id, stream_route = pubsub_client.create_stream(
            name=stream_name,
            exchange_point='science_data',
            stream_definition_id=stream_def_id)

        stream_config = dict(stream_route=stream_route,
                             routing_key=stream_route.routing_key,
                             exchange_point=stream_route.exchange_point,
                             stream_id=stream_id,
                             stream_definition_ref=stream_def_id,
                             parameter_dictionary=pd)
        self.stream_config[stream_name] = stream_config

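# Hedged sketch (illustrative only, not from this listing): the per-stream
# configs built above are typically handed to an instrument agent through its
# agent configuration. DVR_CONFIG is assumed to be defined alongside the test,
# as in the RSN setUp further down; the key layout here is an assumption.
def _build_agent_config(self):
    return {
        'driver_config': DVR_CONFIG,
        'stream_config': self.stream_config,
    }
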
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dataset_management_client = DatasetManagementServiceClient(
        node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(
        node=self.container.node)

    self.time_dom, self.spatial_dom = time_series_domain()

    self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
        name='ctd_parsed_param_dict', id_only=True)

    self.stream_def_id = self.pubsub_client.create_stream_definition(
        name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
    self.addCleanup(self.pubsub_client.delete_stream_definition,
                    self.stream_def_id)

    self.stream_id, self.route_id = self.pubsub_client.create_stream(
        name='parsed_stream',
        stream_definition_id=self.stream_def_id,
        exchange_point='science_data')
    self.addCleanup(self.pubsub_client.delete_stream, self.stream_id)

    self.subscription_id = self.pubsub_client.create_subscription(
        name='parsed_subscription',
        stream_ids=[self.stream_id],
        exchange_name='parsed_subscription')
    self.addCleanup(self.pubsub_client.delete_subscription,
                    self.subscription_id)

    self.pubsub_client.activate_subscription(self.subscription_id)
    self.addCleanup(self.pubsub_client.deactivate_subscription,
                    self.subscription_id)

    self.publisher = StandaloneStreamPublisher(self.stream_id,
                                               self.route_id)

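# Hypothetical round-trip check built on the setUp above (a sketch; the test
# name and the 10-second timeout are assumptions). Event is gevent.event.Event,
# as used elsewhere in these tests.
def test_round_trip(self):
    received = Event()

    def recv(msg, route, stream_id):
        # Any granule arriving on the parsed stream trips the event.
        received.set()

    subscriber = StandaloneStreamSubscriber('parsed_subscription', recv)
    subscriber.start()
    self.addCleanup(subscriber.stop)

    # Publish one granule through the publisher created in setUp.
    rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
    rdt['time'] = numpy.array([0])
    self.publisher.publish(rdt.to_granule())

    self.assertTrue(received.wait(10))
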
def setUp(self):
    # Start container by calling parent's setUp
    super(TestAssembly, self).setUp()

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(
        node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(
        node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(
        node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(
        node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(
        node=self.container.node)
    self.client.DPRS = DataProcessManagementServiceClient(
        node=self.container.node)
    self.client.RR = ResourceRegistryServiceClient(
        node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
    self.dataset_management = DatasetManagementServiceClient()

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.client.RR.find_resources(
                RT.DataProcess, None, None, True)[0]:
            self.client.DPRS.deactivate_data_process(proc_id)
            self.client.DPRS.delete_data_process(proc_id)

    self.addCleanup(killAllDataProcesses)

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.DAMS = DataAcquisitionManagementServiceClient(
        node=self.container.node)
    self.DP = DataProductManagementServiceClient(node=self.container.node)
    self.PSC = PubsubManagementServiceClient(node=self.container.node)
    self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
    self.DSC = DatasetManagementServiceClient()
    self.IDS = IdentityManagementServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)

    # Use the network definition provided by RSN OMS directly.
    rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
    self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)

    # get serialized version for the configuration:
    self._network_definition_ser = NetworkUtil.serialize_network_definition(
        self._network_definition)
    if log.isEnabledFor(logging.TRACE):
        log.trace("NetworkDefinition serialization:\n%s",
                  self._network_definition_ser)

    self._async_data_result = AsyncResult()
    self._data_subscribers = []
    self._samples_received = []
    self.addCleanup(self._stop_data_subscribers)

    self._async_event_result = AsyncResult()
    self._event_subscribers = []
    self._events_received = []
    self.addCleanup(self._stop_event_subscribers)
    self._start_event_subscriber()

def helper_create_highcharts_data_process_definition(container):
    from interface.services.coi.iresource_registry_service import \
        ResourceRegistryServiceClient
    rrclient = ResourceRegistryServiceClient(node=container.node)

    # First look to see if it exists and if not, then create it
    # (id_only so both return paths yield a definition id)
    dpd, _ = rrclient.find_resources(restype=RT.DataProcessDefinition,
                                     name='highcharts_transform',
                                     id_only=True)
    if len(dpd) > 0:
        return dpd[0]

    # Data Process Definition
    log.debug("Create data process definition for highcharts transform")
    dpd_obj = IonObject(
        RT.DataProcessDefinition,
        name='highcharts_transform',
        description='Convert data streams to Highcharts data',
        module='ion.processes.data.transforms.viz.highcharts',
        class_name='VizTransformHighCharts')

    from interface.services.sa.idata_process_management_service import \
        DataProcessManagementServiceClient
    dataprocessclient = DataProcessManagementServiceClient(
        node=container.node)
    procdef_id = dataprocessclient.create_data_process_definition(dpd_obj)

    from interface.services.dm.idataset_management_service import \
        DatasetManagementServiceClient
    datasetclient = DatasetManagementServiceClient(node=container.node)
    pdict_id = datasetclient.read_parameter_dictionary_by_name(
        'highcharts', id_only=True)

    from interface.services.dm.ipubsub_management_service import \
        PubsubManagementServiceClient
    pubsubclient = PubsubManagementServiceClient(node=container.node)

    # create a stream definition for the data from the transform
    stream_def_id = pubsubclient.create_stream_definition(
        name='VizTransformHighCharts', parameter_dictionary_id=pdict_id)
    dataprocessclient.assign_stream_definition_to_data_process_definition(
        stream_def_id, procdef_id, binding='highcharts')

    return procdef_id

def setUp(self):
    super(DataRetrieverIntTest, self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2dm.yml')

    self.datastore_name = 'test_datasets'
    self.datastore = self.container.datastore_manager.get_datastore(
        self.datastore_name, profile=DataStore.DS_PROFILE.SCIDATA)

    self.data_retriever = DataRetrieverServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.resource_registry = ResourceRegistryServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()

    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        self.XP = '.'.join([get_sys_name(), xp_base])
    except ValueError:
        raise StandardError(
            'Invalid CFG for core_xps.science_data: "%s"; '
            'must have "xs.xp" structure' % xs_dot_xp)

def get_parameter_dictionary_by_name(cls, name=''):
    '''
    Convenience wrapper: resolve a parameter dictionary id by name, then
    delegate to get_parameter_dictionary.
    '''
    dms_cli = DatasetManagementServiceClient()
    pd_res = dms_cli.read_parameter_dictionary_by_name(name=name,
                                                       id_only=True)
    return cls.get_parameter_dictionary(pd_res)

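# Minimal usage sketch for the two classmethods above (hypothetical:
# 'ParamDictHelper' stands in for whichever class actually defines them).
def example_pdict_lookup():
    # Resolve a dictionary by name, then feed it to a RecordDictionaryTool,
    # mirroring the param_dictionary usage in publish_loop above.
    pdict = ParamDictHelper.get_parameter_dictionary_by_name(
        'ctd_parsed_param_dict')
    return RecordDictionaryTool(param_dictionary=pdict)
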
def test_download(self):
    # clients
    dataset_management = DatasetManagementServiceClient()

    # verify target object [REFDES01] does not exist in object_store
    self.assertRaises(NotFound, dataset_management.read_qc_table,
                      'REFDES01')

    # NOTE: time is again monkey patched in 'test_upload_qc' but should be
    # static for that
    # MONKEY PATCH time.time() for volatile ts_updated values in dict
    # (set in POST below)
    CONSTANT_TIME = time.time()  # time value we'll use in assert tests

    def new_time():
        return CONSTANT_TIME

    old_time = time.time
    time.time = new_time

    # upload some data
    self.test_upload_qc()

    # restore MONKEY PATCHed time
    time.time = old_time

    REFDES01 = dataset_management.read_qc_table('REFDES01')
    RD01DP01 = REFDES01.get('RD01DP01', None)
    self.assertEquals(
        RD01DP01, {
            'stuck_value': [{
                'units': 'C',
                'consecutive_values': 10,
                'ts_created': CONSTANT_TIME,
                'resolution': 0.005,
                'author': 'Otter'
            }],
            'gradient_test': [{
                'toldat': 0.1,
                'xunits': 's',
                'mindx': 30,
                'author': 'Boon',
                'startdat': None,
                'ddatdx': [-0.01, 0.01],
                'units': 'C',
                'ts_created': CONSTANT_TIME
            }],
            'global_range': [{
                'units': 'm/s',
                'max_value': 1,
                'min_value': -1,
                'ts_created': CONSTANT_TIME,
                'author': 'Douglas C. Neidermeyer'
            }, {
                'units': 'm/s',
                'max_value': 10,
                'min_value': -10,
                'ts_created': CONSTANT_TIME,
                'author': 'Bluto'
            }],
            'trend_test': [{
                'author': 'Pinto',
                'standard_deviation': 4.5,
                'polynomial_order': 4,
                'sample_length': 25,
                'units': 'K',
                'ts_created': CONSTANT_TIME
            }],
            'spike_test': [{
                'author': 'Flounder',
                'range_multiplier': 4,
                'window_length': 15,
                'units': 'degrees',
                'ts_created': CONSTANT_TIME,
                'accuracy': 0.0001
            }]
        })

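# A safer variant of the monkey patch used above (a sketch, not from the
# original tests): restoring time.time in a finally block guarantees the
# patch is undone even if the upload raises.
def _with_frozen_time(self, fn):
    # Freeze time.time() at the current instant, run fn, then restore.
    frozen = time.time()
    old_time = time.time
    time.time = lambda: frozen
    try:
        return fn(), frozen
    finally:
        time.time = old_time

# e.g.: _, CONSTANT_TIME = self._with_frozen_time(self.test_upload_qc)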