def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataAcquisitionManagementService
    self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.data_retriever = DataRetrieverServiceClient(node=self.container.node)

    self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)

    # Data async and subscription TODO: Replace with new subscriber
    self._finished_count = None
    # TODO: Switch to gevent.queue.Queue
    self._async_finished_result = AsyncResult()
    self._finished_events_received = []
    self._finished_event_subscriber = None
    self._start_finished_event_subscriber()
    self.addCleanup(self._stop_finished_event_subscriber)

    self.DVR_CONFIG = {
        'dvr_mod': 'ion.agents.data.handlers.slocum_data_handler',
        'dvr_cls': 'SlocumDataHandler',
    }

    self._setup_resources()

    self.agent_config = {
        'driver_config': self.DVR_CONFIG,
        'stream_config': {},
        'agent': {'resource_id': self.EDA_RESOURCE_ID},
        'test_mode': True
    }

    datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                                          name='ExternalDatasetAgentInstance1',
                                          description='external data agent instance',
                                          handler_module=self.EDA_MOD,
                                          handler_class=self.EDA_CLS,
                                          dataset_driver_config=self.DVR_CONFIG,
                                          dataset_agent_config=self.agent_config)
    self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(
        external_dataset_agent_instance=datasetagent_instance_obj,
        external_dataset_agent_id=self.datasetagent_id,
        external_dataset_id=self.EDA_RESOURCE_ID)

    # TG: Setup/configure the granule logger to log granules as they're published
    pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id)

    dataset_agent_instance_obj = self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id)
    print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client = ResourceAgentClient('datasetagentclient', name=pid, process=FakeProcess())
    log.debug("test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))
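# The setUp above relies on _start_finished_event_subscriber / _stop_finished_event_subscriber,
# which are not shown here. Below is a minimal sketch of what such helpers might look like; it is
# an assumption, not the original implementation. It reuses pyon's EventSubscriber (the same
# subscriber machinery behind EventPublisher used elsewhere in these tests) and the AsyncResult
# created in setUp. The 'DeviceEvent' event type and the callback name are illustrative placeholders.
def _start_finished_event_subscriber(self):
    def consume_event(evt, *args, **kwargs):
        # Record every event received and release the waiter once the expected count is reached.
        self._finished_events_received.append(evt)
        if self._finished_count and self._finished_count == len(self._finished_events_received):
            self._async_finished_result.set(evt)

    self._finished_event_subscriber = EventSubscriber(event_type='DeviceEvent',
                                                      callback=consume_event)
    self._finished_event_subscriber.start()

def _stop_finished_event_subscriber(self):
    if self._finished_event_subscriber:
        self._finished_event_subscriber.stop()
        self._finished_event_subscriber = None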
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.org_management_service = OrgManagementServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.event_publisher = EventPublisher()
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()

    # create missing data process definition
    dpd_obj = IonObject(RT.DataProcessDefinition,
                        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                        description="normally in preload",
                        module='ion.processes.data.transforms.logical_transform',
                        class_name='logical_transform')
    self.dataprocessclient.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
            self.dataprocessclient.deactivate_data_process(proc_id)
            self.dataprocessclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    # Start container
    super(TestActivateRSNVel3DInstrument, self).setUp()
    config = DotDict()

    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    # set up listener vars
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []

    self.event_publisher = EventPublisher()
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml')

    # self.eoas_cli = ExternalObservatoryAgentServiceClient()
    # self.rr_cli = ResourceRegistryServiceClient()
    self.dams_cli = DataAcquisitionManagementServiceClient()
    self.dpms_cli = DataProductManagementServiceClient()

    self._setup_ncom()
    self._setup_hfr()

    # eoas_proc = self.container.proc_manager.procs_by_name['external_data_agent_management']
    # log.debug("Got EOAS Process: %s" % eoas_proc)

    self._ncom_agt_cli = ResourceAgentClient(resource_id=self.ncom_ds_id, name='external_observatory_agent', process=FakeProcess())
    log.debug("Got a ResourceAgentClient: res_id=%s" % self._ncom_agt_cli.resource_id)

    self._hfr_agt_cli = ResourceAgentClient(resource_id=self.hfr_ds_id, name='external_observatory_agent', process=FakeProcess())
    log.debug("Got a ResourceAgentClient: res_id=%s" % self._hfr_agt_cli.resource_id)
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
            self.dataprocessclient.deactivate_data_process(proc_id)
            self.dataprocessclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.workflowclient = WorkflowManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)

    self.ctd_stream_def = SBE37_CDM_stream_definition()
def setUp(self):
    # Start container by calling parent's setUp
    super(TestAssembly, self).setUp()

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
    self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node)

    self.client.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
    self.dataset_management = DatasetManagementServiceClient()

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.client.RR.find_resources(RT.DataProcess, None, None, True)[0]:
            self.client.DPRS.deactivate_data_process(proc_id)
            self.client.DPRS.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    print 'started services'

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataset_management = self.datasetclient
def setUp(self):
    # Start container
    super(TestRSNIntegration, self).setUp()
    config = DotDict()
    #config.bootstrap.use_es = True

    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.catch_alert = gevent.queue.Queue()
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.c = DotDict()
    self.c.resource_registry = self.rrclient
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
            self.dsmsclient.deactivate_data_process(proc_id)
            self.dsmsclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Instantiate a process to represent the test
    process = TransformWorkerTestProcess()

    self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)

    self.time_dom, self.spatial_dom = time_series_domain()

    self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)

    self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
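# self.wait_time is read from CFG so slow CI runs can raise the receive timeout without code
# changes. A hypothetical usage sketch (not taken from the original tests): a test body blocks on
# a gevent queue that a subscriber callback fills, bounded by self.wait_time. The helper name and
# queue argument are illustrative only.
def _wait_for_sample(self, sample_queue):
    # sample_queue is assumed to be a gevent.queue.Queue fed by a stream subscriber callback.
    try:
        return sample_queue.get(timeout=self.wait_time)
    except gevent.queue.Empty:
        self.fail('no sample received within %s seconds' % self.wait_time)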
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    print 'started services'

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)

    # set up listener vars
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dpsc_cli = DataProductManagementServiceClient()
    self.rrclient = ResourceRegistryServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.pubsubcli = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class': 'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    #------------------------------------------------------------------------------------------------
    # First launch the ingestors
    #------------------------------------------------------------------------------------------------
    self.exchange_space = 'science_granule_ingestion'
    self.exchange_point = 'science_data'
    config = DotDict()
    config.process.datastore_name = 'datasets'
    config.process.queue_name = self.exchange_space

    self.exchange_names.append(self.exchange_space)
    self.exchange_points.append(self.exchange_point)

    pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'], configuration=config)
    log.debug("the ingestion worker process id: %s", pid)
    self.pids.append(pid)
def setUp(self):
    self._start_container()

    self._pp = pprint.PrettyPrinter()

    log.debug("oms_uri = %s", OMS_URI)
    self.oms = CIOMSClientFactory.create_instance(OMS_URI)

    self._get_platform_attributes()

    url = OmsTestMixin.start_http_server()
    log.info("TestPlatformInstrument:setup http url %s", url)

    result = self.oms.event.register_event_listener(url)
    log.info("TestPlatformInstrument:setup register_event_listener result %s", result)

    # response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall')
    # log.info("TestPlatformInstrument:setup get_platform_ports %s", response)

    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    self.org_id = self.RR2.create(any_old(RT.Org))
    log.debug("Org created: %s", self.org_id)

    # see _set_receive_timeout
    self._receive_timeout = 177

    self.instrument_device = ''
    self.platform_device = ''
    self.platform_agent_instance_id = ''
    self._pa_client = ''

    def done():
        CIOMSClientFactory.destroy_instance(self.oms)
        event_notifications = OmsTestMixin.stop_http_server()
        log.info("event_notifications = %s" % str(event_notifications))

    self.addCleanup(done)
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataAcquisitionManagementService
    self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.pubsub_management = PubsubManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.process_dispatch_client = ProcessDispatcherServiceClient(node=self.container.node)
    self.resource_registry = self.container.resource_registry

    self.context_ids = self.build_param_contexts()
    self.setup_resources()
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    config = dict(op="load", scenario="NOSE", attachments="res/preload/r2_ioc/attachments")
    self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=config)

    self.pubsub = PubsubManagementServiceClient()
    self.dams = DataAcquisitionManagementServiceClient()
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    # Use the network definition provided by RSN OMS directly.
    rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
    self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
    # get serialized version for the configuration:
    self._network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
    if log.isEnabledFor(logging.DEBUG):
        log.debug("NetworkDefinition serialization:\n%s", self._network_definition_ser)

    self.platformModel_id = None
    self.all_platforms = {}
    self.agent_streamconfig_map = {}

    self._async_data_result = AsyncResult()
    self._data_subscribers = []
    self._samples_received = []
    self.addCleanup(self._stop_data_subscribers)

    self._async_event_result = AsyncResult()
    self._event_subscribers = []
    self._events_received = []
    self.addCleanup(self._stop_event_subscribers)
    self._start_event_subscriber()

    self._set_up_DataProduct_obj()
    self._set_up_PlatformModel_obj()
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
    self.client.RR = ResourceRegistryServiceClient(node=self.container.node)

    self.RR = self.client.RR
def setUp(self):
    # Start container
    self._start_container()
    log.debug("Start rel from url")
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.DPMS = DataProductManagementServiceClient()
    self.RR = ResourceRegistryServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.DAMS = DataAcquisitionManagementServiceClient()
    self.PSMS = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.PD = ProcessDispatcherServiceClient()
    self.DSMS = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    log.debug("get datastore")
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)

    self.stream_def_id = self.PSMS.create_stream_definition(name='SBE37_CDM')

    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class': 'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.PD.create_process_definition(process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    self.addCleanup(self.cleaning_up)
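# setUp above registers self.cleaning_up, but the helper itself is not shown. A minimal sketch of
# what it might do, based only on the state created in setUp, follows; this is an assumption, not
# the original code. It stops the ingestion workers scheduled by the test via the process
# dispatcher's cancel_process operation.
def cleaning_up(self):
    # Cancel every ingestion worker that the test scheduled.
    for pid in self.pids:
        log.debug("cancelling ingestion worker %s", pid)
        self.PD.cancel_process(pid)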
def setUp(self):
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    log.debug("TestExternalDatasetAgentMgmt: started services")

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.resource_registry = self.container.resource_registry
    self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.instrument_management = InstrumentManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.data_process_management = DataProcessManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.user_notification = UserNotificationServiceClient()
    self.workflow_management = WorkflowManagementServiceClient()
    self.visualization = VisualizationServiceClient()
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)

    self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.OMS = ObservatoryManagementServiceClient()
    self.org_management_service = OrgManagementServiceClient()
    self.IMS = InstrumentManagementServiceClient()
    self.dpclient = DataProductManagementServiceClient()
    self.pubsubcli = PubsubManagementServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.data_product_management = DataProductManagementServiceClient()

    self._load_stage = 0
    self._resources = {}
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.resource_registry = self.container.resource_registry
    self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.instrument_management = InstrumentManagementServiceClient()
    self.discovery = DiscoveryServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.data_process_management = DataProcessManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.user_notification = UserNotificationServiceClient()
    self.observatory_management = ObservatoryManagementServiceClient()
    self.visualization = VisualizationServiceClient()

    self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
    self.ctd_count = 0
def setUp(self):
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)

    self.catch_alert = gevent.queue.Queue()
def setUp(self):
    # Start container by calling parent's setUp
    super(TestAssembly, self).setUp()

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
    self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node)

    self.client.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
    self.dataset_management = DatasetManagementServiceClient()

    dpd_obj = IonObject(RT.DataProcessDefinition,
                        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                        description="normally in preload",
                        module='ion.processes.data.transforms.logical_transform',
                        class_name='logical_transform')
    self.client.DPRS.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.client.RR.find_resources(RT.DataProcess, None, None, True)[0]:
            self.client.DPRS.deactivate_data_process(proc_id)
            self.client.DPRS.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    # Start container
    self._start_container()

    unittest  # suppress a PyCharm inspector error if all unittest.skip references are commented out

    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.IDS = IdentityManagementServiceClient(node=self.container.node)
    self.PSC = PubsubManagementServiceClient(node=self.container.node)
    self.DP = DataProductManagementServiceClient(node=self.container.node)
    self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.DSC = DatasetManagementServiceClient(node=self.container.node)
    self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)