def setUp(self):
    super(DataRetrieverServiceIntTest, self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2dm.yml')

    self.couch = self.container.datastore_manager.get_datastore(
        'test_data_retriever', profile=DataStore.DS_PROFILE.SCIDATA)
    self.datastore_name = 'test_data_retriever'

    self.dr_cli = DataRetrieverServiceClient(node=self.container.node)
    self.dsm_cli = DatasetManagementServiceClient(node=self.container.node)
    self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
    self.ps_cli = PubsubManagementServiceClient(node=self.container.node)
    self.tms_cli = TransformManagementServiceClient(node=self.container.node)
    self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        self.XP = '.'.join([bootstrap.get_sys_name(), xp_base])
    except ValueError:
        raise StandardError(
            'Invalid CFG for core_xps.science_data: "%s"; '
            'must have "xs.xp" structure' % xs_dot_xp)

    self.thread_pool = list()

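# Illustration of the "xs.xp" parsing above, with hypothetical values (the
# actual CFG contents are not shown in this snippet): given
# CFG.core_xps.science_data == 'ioncore.science_data' and a sys name of
# 'ion_test', the split yields XS == 'ioncore' and
# XP == 'ion_test.science_data'. Any value without exactly one '.' raises
# the StandardError above.
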
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    print 'started services'

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)

    # Set up listener vars
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []

def setUp(self):
    # Start container
    super(TestActivateRSNVel3DInstrument, self).setUp()
    config = DotDict()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

def _build_stream_config(self):
    """Build the stream configuration for each stream named in the driver."""
    if not self.packet_config:
        return

    streams = self.packet_config
    log.debug("Streams: %s", streams)

    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    dataset_management = DatasetManagementServiceClient()

    # Create streams and subscriptions for each stream named in driver.
    self.stream_config = {}

    for stream_name in streams:
        pd_id = None
        try:
            pd_id = dataset_management.read_parameter_dictionary_by_name(
                stream_name, id_only=True)
        except:
            log.error("No pd_id found for param_dict '%s'" % stream_name)
            if self.use_default_stream:
                log.error("using default pd '%s'" % DEFAULT_STREAM_NAME)
                pd_id = dataset_management.read_parameter_dictionary_by_name(
                    DEFAULT_STREAM_NAME, id_only=True)

        if not pd_id:
            raise IDKException(
                "Missing parameter dictionary for stream '%s'" % stream_name)

        log.debug("parameter dictionary id: %s" % pd_id)

        stream_def_id = pubsub_client.create_stream_definition(
            name=stream_name, parameter_dictionary_id=pd_id)
        pd = pubsub_client.read_stream_definition(
            stream_def_id).parameter_dictionary

        try:
            stream_id, stream_route = pubsub_client.create_stream(
                name=stream_name,
                exchange_point='science_data',
                stream_definition_id=stream_def_id)

            stream_config = dict(
                stream_route=stream_route,
                routing_key=stream_route.routing_key,
                exchange_point=stream_route.exchange_point,
                stream_id=stream_id,
                stream_definition_ref=stream_def_id,
                parameter_dictionary=pd)
            self.stream_config[stream_name] = stream_config
        except Exception as e:
            log.error("stream publisher exception: %s", e)

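# A minimal sketch (not from the original source) of how the stream config
# built above is typically consumed: it is dropped into the agent's
# configuration, mirroring the agent_config pattern in the TestBulkIngest
# setUp later in this section. The method name and resource_id parameter
# are hypothetical.
def _build_agent_config(self, resource_id):
    self._build_stream_config()
    return {
        'driver_config': self.DVR_CONFIG,      # driver module/class config
        'stream_config': self.stream_config,   # built by _build_stream_config()
        'agent': {'resource_id': resource_id},
        'test_mode': True,
    }
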
def load_data_product(self):
    dset_i = 0
    dataset_management = DatasetManagementServiceClient()
    pubsub_management = PubsubManagementServiceClient()
    data_product_management = DataProductManagementServiceClient()
    resource_registry = self.container.instance.resource_registry

    dp_obj = DataProduct(
        name='instrument_data_product_%i' % dset_i,
        description='ctd stream test',
        processing_level_code='Parsed_Canonical')

    pdict_id = dataset_management.read_parameter_dictionary_by_name(
        'ctd_parsed_param_dict', id_only=True)
    stream_def_id = pubsub_management.create_stream_definition(
        name='parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(pubsub_management.delete_stream_definition, stream_def_id)

    data_product_id = data_product_management.create_data_product(
        data_product=dp_obj, stream_definition_id=stream_def_id)
    self.addCleanup(data_product_management.delete_data_product, data_product_id)

    data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(
        data_product_management.suspend_data_product_persistence, data_product_id)

    stream_ids, assocs = resource_registry.find_objects(
        subject=data_product_id, predicate='hasStream', id_only=True)
    stream_id = stream_ids[0]
    route = pubsub_management.read_stream_route(stream_id)

    dataset_ids, assocs = resource_registry.find_objects(
        subject=data_product_id, predicate='hasDataset', id_only=True)
    dataset_id = dataset_ids[0]

    return data_product_id, stream_id, route, stream_def_id, dataset_id

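# A minimal usage sketch (the test-method name is hypothetical): the tuple
# returned by load_data_product() carries everything needed to publish to,
# and later retrieve from, the persisted data product.
def test_retrieve_data_product(self):
    data_product_id, stream_id, route, stream_def_id, dataset_id = \
        self.load_data_product()
    # Publish granules on (stream_id, route), then read them back from
    # dataset_id through the Data Retriever service.
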
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    print 'started services'

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataset_management = self.datasetclient

def on_start(self):
    super(VizTransformProcForMatplotlibGraphs, self).on_start()

    self.initDataFlag = True
    # Stores a dictionary of variables : [list of values]
    self.graph_data = {}

    # Need some clients
    self.rr_cli = ResourceRegistryServiceProcessClient(
        process=self, node=self.container.node)
    self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)

    # Extract the various parameters passed to the transform process
    self.out_stream_id = self.CFG.get('process').get(
        'publish_streams').get('visualization_service_submit_stream_id')

    # Create a publisher on the output stream
    out_stream_pub_registrar = StreamPublisherRegistrar(
        process=self.container, node=self.container.node)
    self.out_stream_pub = out_stream_pub_registrar.create_publisher(
        stream_id=self.out_stream_id)

    self.data_product_id = self.CFG.get('data_product_id')
    self.stream_def_id = self.CFG.get('stream_def_id')
    self.stream_def = self.rr_cli.read(self.stream_def_id)

    # Start the thread responsible for keeping track of time and generating
    # graphs. Mutex for ensuring proper concurrent communications between
    # threads.
    self.lock = RLock()
    self.rendering_proc = Greenlet(self.rendering_thread)
    self.rendering_proc.start()

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Instantiate a process to represent the test
    process = TransformWorkerTestProcess()

    self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceProcessClient(
        node=self.container.node, process=process)

    self.time_dom, self.spatial_dom = time_series_domain()

    self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)
    self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dataset_management = DatasetManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.resource_registry = self.container.resource_registry

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    # Set up listener vars
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []

    self.event_publisher = EventPublisher()

def build_stream_config(streams):
    """Build an agent stream config from a {stream_name: param_dict_name} map."""
    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=cc.node)
    dataset_management = DatasetManagementServiceClient()

    # Create streams and subscriptions for each stream named in driver.
    agent_stream_config = {}

    for stream_name, param_dict_name in streams.iteritems():
        pd_id = dataset_management.read_parameter_dictionary_by_name(
            param_dict_name, id_only=True)

        stream_def_id = pubsub_client.create_stream_definition(
            name=stream_name, parameter_dictionary_id=pd_id)
        pd = pubsub_client.read_stream_definition(
            stream_def_id).parameter_dictionary

        stream_id, stream_route = pubsub_client.create_stream(
            name=stream_name,
            exchange_point='science_data',
            stream_definition_id=stream_def_id)

        stream_config = dict(
            stream_route=stream_route,
            routing_key=stream_route.routing_key,
            exchange_point=stream_route.exchange_point,
            stream_id=stream_id,
            stream_definition_ref=stream_def_id,
            parameter_dictionary=pd)
        agent_stream_config[stream_name] = stream_config

    return agent_stream_config

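# A minimal usage sketch: 'ctd_parsed_param_dict' appears elsewhere in this
# section, the 'raw' parameter-dictionary name is an assumption, and 'cc' is
# the container client the function above already relies on.
streams = {
    'parsed': 'ctd_parsed_param_dict',
    'raw': 'ctd_raw_param_dict',  # hypothetical name
}
agent_stream_config = build_stream_config(streams)
# agent_stream_config['parsed']['stream_id'] and ['stream_route'] can then
# be wired into an agent's 'stream_config' section.
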
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()

    # Deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(
                RT.DataProcess, None, None, True)[0]:
            self.dataprocessclient.deactivate_data_process(proc_id)
            self.dataprocessclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.workflowclient = WorkflowManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)

    self.ctd_stream_def = SBE37_CDM_stream_definition()

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.org_management_service = OrgManagementServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.event_publisher = EventPublisher()

def setUp(self):
    # Start container by calling parent's setUp
    super(TestAssembly, self).setUp()

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
    self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node)
    self.client.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
    self.dataset_management = DatasetManagementServiceClient()

    # Deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.client.RR.find_resources(
                RT.DataProcess, None, None, True)[0]:
            self.client.DPRS.deactivate_data_process(proc_id)
            self.client.DPRS.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)

def setUp(self):
    # Start container
    super(TestRSNIntegration, self).setUp()
    config = DotDict()
    #config.bootstrap.use_es = True

    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.catch_alert = gevent.queue.Queue()

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()

    # Create missing data process definition
    dpd_obj = IonObject(
        RT.DataProcessDefinition,
        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
        description="normally in preload",
        module='ion.processes.data.transforms.logical_transform',
        class_name='logical_transform')
    self.dataprocessclient.create_data_process_definition(dpd_obj)

    # Deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(
                RT.DataProcess, None, None, True)[0]:
            self.dataprocessclient.deactivate_data_process(proc_id)
            self.dataprocessclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.c = DotDict()
    self.c.resource_registry = self.rrclient
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)

    # Deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(
                RT.DataProcess, None, None, True)[0]:
            self.dsmsclient.deactivate_data_process(proc_id)
            self.dsmsclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dpsc_cli = DataProductManagementServiceClient()
    self.rrclient = ResourceRegistryServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.pubsubcli = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class': 'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.process_dispatcher.create_process_definition(
        process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    #------------------------------------------
    # First launch the ingestors
    #------------------------------------------
    self.exchange_space = 'science_granule_ingestion'
    self.exchange_point = 'science_data'
    config = DotDict()
    config.process.datastore_name = 'datasets'
    config.process.queue_name = self.exchange_space

    self.exchange_names.append(self.exchange_space)
    self.exchange_points.append(self.exchange_point)

    pid = self.process_dispatcher.schedule_process(
        self.process_definitions['ingestion_worker'], configuration=config)
    log.debug("the ingestion worker process id: %s", pid)
    self.pids.append(pid)

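# A sketch of the matching teardown for the workers launched above (the
# original test's cleanup is not shown here; cancel_process is the standard
# Process Dispatcher operation for stopping a scheduled process):
def tearDown(self):
    for pid in self.pids:
        self.process_dispatcher.cancel_process(pid)
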
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataAcquisitionManagementService
    self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.data_retriever = DataRetrieverServiceClient(node=self.container.node)

    self._container_client = ContainerAgentClient(
        node=self.container.node, name=self.container.name)

    # Data async and subscription TODO: Replace with new subscriber
    self._finished_count = None
    # TODO: Switch to gevent.queue.Queue
    self._async_finished_result = AsyncResult()
    self._finished_events_received = []
    self._finished_event_subscriber = None
    self._start_finished_event_subscriber()
    self.addCleanup(self._stop_finished_event_subscriber)

    self.DVR_CONFIG = {
        'dvr_mod': 'ion.agents.data.handlers.slocum_data_handler',
        'dvr_cls': 'SlocumDataHandler',
    }

    self._setup_resources()

    self.agent_config = {
        'driver_config': self.DVR_CONFIG,
        'stream_config': {},
        'agent': {'resource_id': self.EDA_RESOURCE_ID},
        'test_mode': True
    }

    datasetagent_instance_obj = IonObject(
        RT.ExternalDatasetAgentInstance,
        name='ExternalDatasetAgentInstance1',
        description='external data agent instance',
        handler_module=self.EDA_MOD,
        handler_class=self.EDA_CLS,
        dataset_driver_config=self.DVR_CONFIG,
        dataset_agent_config=self.agent_config)
    self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(
        external_dataset_agent_instance=datasetagent_instance_obj,
        external_dataset_agent_id=self.datasetagent_id,
        external_dataset_id=self.EDA_RESOURCE_ID)

    # TG: Setup/configure the granule logger to log granules as they're published
    pid = self.dams_client.start_external_dataset_agent_instance(
        self.dataset_agent_instance_id)

    dataset_agent_instance_obj = self.dams_client.read_external_dataset_agent_instance(
        self.dataset_agent_instance_id)
    print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client = ResourceAgentClient(
        'datasetagentclient', name=pid, process=FakeProcess())
    log.debug("test_createTransformsThenActivateInstrument:: got ia client %s",
              str(self._ia_client))

def __init__(self, data_product_id, interval=1, simple_time=False):
    self.resource_registry = Container.instance.resource_registry
    self.pubsub_management = PubsubManagementServiceClient()
    self.data_product_id = data_product_id
    self.i = 0
    self.interval = interval
    self.simple_time = simple_time
    self.finished = Event()
    self.g = gevent.spawn(self.run)

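# A minimal usage sketch (the publisher's class name is not shown above, so
# 'ScheduledPublisher' is hypothetical; assumes run() exits once
# self.finished is set):
pub = ScheduledPublisher(data_product_id, interval=2)
gevent.sleep(10)     # let run() publish a few times
pub.finished.set()   # signal the loop to stop
pub.g.join()         # wait for the greenlet to finish
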
def setUp(self):
    self._start_container()

    self._pp = pprint.PrettyPrinter()

    log.debug("oms_uri = %s", OMS_URI)
    self.oms = CIOMSClientFactory.create_instance(OMS_URI)

    self._get_platform_attributes()

    url = OmsTestMixin.start_http_server()
    log.info("TestPlatformInstrument:setup http url %s", url)

    result = self.oms.event.register_event_listener(url)
    log.info("TestPlatformInstrument:setup register_event_listener result %s",
             result)

    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    self.org_id = self.RR2.create(any_old(RT.Org))
    log.debug("Org created: %s", self.org_id)

    # See _set_receive_timeout
    self._receive_timeout = 177

    self.instrument_device = ''
    self.platform_device = ''
    self.platform_agent_instance_id = ''
    self._pa_client = ''

    def done():
        CIOMSClientFactory.destroy_instance(self.oms)
        event_notifications = OmsTestMixin.stop_http_server()
        log.info("event_notifications = %s" % str(event_notifications))

    self.addCleanup(done)

def _start_data_subscribers(self, count, raw_count):
    """Start subscribers for the parsed and raw data streams."""
    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=self.container.node)

    # Create streams and subscriptions for each stream named in driver.
    self._data_subscribers = []
    self._samples_received = []
    self._raw_samples_received = []
    self._async_sample_result = AsyncResult()
    self._async_raw_sample_result = AsyncResult()

    # A callback for processing subscribed-to data.
    def recv_data(message, stream_route, stream_id):
        log.info('Received parsed data on %s (%s,%s)', stream_id,
                 stream_route.exchange_point, stream_route.routing_key)
        self._samples_received.append(message)
        if len(self._samples_received) == count:
            self._async_sample_result.set()

    def recv_raw_data(message, stream_route, stream_id):
        log.info('Received raw data on %s (%s,%s)', stream_id,
                 stream_route.exchange_point, stream_route.routing_key)
        self._raw_samples_received.append(message)
        if len(self._raw_samples_received) == raw_count:
            self._async_raw_sample_result.set()

    from pyon.util.containers import create_unique_identifier

    stream_name = 'parsed'
    parsed_config = self._stream_config[stream_name]
    stream_id = parsed_config['stream_id']

    exchange_name = create_unique_identifier("%s_queue" % stream_name)
    self._purge_queue(exchange_name)
    sub = StandaloneStreamSubscriber(exchange_name, recv_data)
    sub.start()
    self._data_subscribers.append(sub)
    sub_id = pubsub_client.create_subscription(
        name=exchange_name, stream_ids=[stream_id], timeout=120.2)
    pubsub_client.activate_subscription(sub_id, timeout=120.3)
    # Bind the subscription to the standalone subscriber
    # (easier cleanup, not good in real practice)
    sub.subscription_id = sub_id

    stream_name = 'raw'
    parsed_config = self._stream_config[stream_name]
    stream_id = parsed_config['stream_id']

    exchange_name = create_unique_identifier("%s_queue" % stream_name)
    self._purge_queue(exchange_name)
    sub = StandaloneStreamSubscriber(exchange_name, recv_raw_data)
    sub.start()
    self._data_subscribers.append(sub)
    sub_id = pubsub_client.create_subscription(
        name=exchange_name, stream_ids=[stream_id], timeout=120.4)
    pubsub_client.activate_subscription(sub_id, timeout=120.5)
    # Bind the subscription to the standalone subscriber
    # (easier cleanup, not good in real practice)
    sub.subscription_id = sub_id

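# A minimal usage sketch: a test typically starts the subscribers, drives
# the instrument, then blocks on the AsyncResults set by the callbacks above
# (the counts and timeout are illustrative):
#
#   self._start_data_subscribers(count=4, raw_count=4)
#   # ... drive the instrument so parsed and raw granules get published ...
#   self._async_sample_result.get(timeout=120)      # waits for count samples
#   self._async_raw_sample_result.get(timeout=120)  # waits for raw_count samples
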
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.ingestion_management = IngestionManagementServiceClient()
    self.resource_registry = ResourceRegistryServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()

    self.ingest_name = 'basic'
    self.exchange = 'testdata'

def _stop_data_subscribers(self):
    pubsub_client = PubsubManagementServiceClient()
    for subscriber in self._data_subscribers:
        if hasattr(subscriber, 'subscription_id'):
            try:
                pubsub_client.deactivate_subscription(subscriber.subscription_id)
            except:
                pass
            pubsub_client.delete_subscription(subscriber.subscription_id)
        subscriber.stop()

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url(
        'res/deploy/r2deploy.yml')  # Because hey why not?!

    self.dataset_management = DatasetManagementServiceClient()
    self.data_process_management = DataProcessManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.pubsub_management = PubsubManagementServiceClient()
    self.resource_registry = ResourceRegistryServiceClient()
    self.dataset_management = DatasetManagementServiceClient()

    self.pdicts = {}
    self.queue_cleanup = list()
    self.exchange_cleanup = list()

def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dataset_management = DatasetManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()

    self.rdt = None
    self.data_producer_id = None
    self.provider_metadata_update = None
    self.event = Event()

def _start_data_subscribers(self, count):
    """Start subscribers for each configured stream; tail samples into an xterm."""
    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=self.container.node)

    # Create streams and subscriptions for each stream named in driver.
    self._data_subscribers = []
    self._samples_received = []

    # Open an xterm that tails the named pipe the callback writes to.
    subprocess.Popen(['xterm', '-T', 'InstrumentScienceData',
                      '-e', 'tail', '-f', PIPE_PATH])

    # A callback for processing subscribed-to data.
    def recv_data(message, stream_route, stream_id):
        print 'Received message on %s (%s,%s)' % (
            stream_id, stream_route.exchange_point, stream_route.routing_key)
        log.info('Received message on %s (%s,%s)', stream_id,
                 stream_route.exchange_point, stream_route.routing_key)

        self.pipeData = open(PIPE_PATH, "w", 1)
        self.pipeData.write(str(message))
        self.pipeData.flush()
        self.pipeData.close()

        self._samples_received.append(message)

    for stream_name, stream_config in self._stream_config.iteritems():
        stream_id = stream_config['stream_id']

        # Create subscriptions for each stream.
        exchange_name = '%s_queue' % stream_name
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_data)
        sub.start()
        self._data_subscribers.append(sub)
        print 'stream_id: %s' % stream_id
        sub_id = pubsub_client.create_subscription(
            name=exchange_name, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        # Bind the subscription to the standalone subscriber
        # (easier cleanup, not good in real practice)
        sub.subscription_id = sub_id

def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    config = dict(op="load",
                  scenario="NOSE",
                  attachments="res/preload/r2_ioc/attachments")
    self.container.spawn_process("Loader",
                                 "ion.processes.bootstrap.ion_loader",
                                 "IONLoader",
                                 config=config)

    self.pubsub = PubsubManagementServiceClient()
    self.dams = DataAcquisitionManagementServiceClient()