def setUp(self):
    """Start the container, deploy the r2dm rel file, and build service clients."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2dm.yml')

    # Clients to the services exercised by this test class.
    self.unsc = UserNotificationServiceClient(node=self.container.node)
    self.rrc = ResourceRegistryServiceClient(node=self.container.node)
    self.imc = IdentityManagementServiceClient(node=self.container.node)
def setUp(self):
    """Start the container, deploy r2 services, and launch one ingestion worker."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Service clients used throughout the tests.
    self.dpsc_cli = DataProductManagementServiceClient()
    self.rrclient = ResourceRegistryServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.pubsubcli = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

    # Register the science-granule ingestion worker process definition.
    self.process_definitions = {}
    worker_definition = ProcessDefinition(name='ingestion worker')
    worker_definition.executable = {
        'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class': 'ScienceGranuleIngestionWorker'
    }
    worker_def_id = self.process_dispatcher.create_process_definition(
        process_definition=worker_definition)
    self.process_definitions['ingestion_worker'] = worker_def_id

    # Bookkeeping for processes / exchange resources created during tests.
    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    #------------------------------------------------------------------------------------------------
    # First launch the ingestors
    #------------------------------------------------------------------------------------------------
    self.exchange_space = 'science_granule_ingestion'
    self.exchange_point = 'science_data'

    config = DotDict()
    config.process.datastore_name = 'datasets'
    config.process.queue_name = self.exchange_space

    self.exchange_names.append(self.exchange_space)
    self.exchange_points.append(self.exchange_point)

    pid = self.process_dispatcher.schedule_process(
        self.process_definitions['ingestion_worker'], configuration=config)
    log.debug("the ingestion worker process id: %s", pid)
    self.pids.append(pid)
def setUp(self):
    """Start the container, deploy r2 services, and build clients for the transform tests."""
    super(TransformPrototypeIntTest, self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Service clients and helpers.
    self.rrc = ResourceRegistryServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.ssclient = SchedulerServiceClient()
    self.event_publisher = EventPublisher()
    self.user_notification = UserNotificationServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()

    # Track exchange resources created by individual tests.
    self.exchange_names = []
    self.exchange_points = []
def setUp(self):
    """Start the container, deploy r2 services, and prepare the ingestion environment."""
    self._start_container()

    log.debug("Start rel from url")
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Service clients (short uppercase aliases follow this class's convention).
    self.DPMS = DataProductManagementServiceClient()
    self.RR = ResourceRegistryServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.DAMS = DataAcquisitionManagementServiceClient()
    self.PSMS = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.PD = ProcessDispatcherServiceClient()
    self.DSMS = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    log.debug("get datastore")
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.PSMS.create_stream_definition(name='SBE37_CDM')

    # Register the science-granule ingestion worker process definition.
    self.process_definitions = {}
    worker_definition = ProcessDefinition(name='ingestion worker')
    worker_definition.executable = {
        'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class': 'ScienceGranuleIngestionWorker'
    }
    worker_def_id = self.PD.create_process_definition(
        process_definition=worker_definition)
    self.process_definitions['ingestion_worker'] = worker_def_id

    # Bookkeeping for processes / exchange resources created during tests.
    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    self.addCleanup(self.cleaning_up)
def setUp(self):
    """Start the container, deploy r2 services, and build service clients.

    Fix: the original instantiated DataProductManagementServiceClient and
    DatasetManagementServiceClient twice each, overwriting the first instance;
    the redundant constructions are removed (attribute names are unchanged).
    """
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.resource_registry = self.container.resource_registry
    self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)

    # One client per service; each attribute is assigned exactly once.
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.instrument_management = InstrumentManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.data_process_management = DataProcessManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.user_notification = UserNotificationServiceClient()
    self.workflow_management = WorkflowManagementServiceClient()
    self.visualization = VisualizationServiceClient()
def setUp(self):
    """Start the container, deploy r2 services, and build service clients.

    Fix: the original instantiated DatasetManagementServiceClient twice,
    overwriting the first instance; the redundant construction is removed
    (attribute names are unchanged).
    """
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.resource_registry = self.container.resource_registry
    self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)

    # One client per service; each attribute is assigned exactly once.
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.instrument_management = InstrumentManagementServiceClient()
    self.discovery = DiscoveryServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.data_process_management = DataProcessManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.user_notification = UserNotificationServiceClient()
    self.observatory_management = ObservatoryManagementServiceClient()
    self.visualization = VisualizationServiceClient()

    self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
    self.ctd_count = 0
def setUp(self):
    """Start the container with ElasticSearch enabled and build service clients."""
    # Start container
    super(TestActivateInstrumentIntegration, self).setUp()

    config = DotDict()
    config.bootstrap.use_es = True

    self._start_container()
    self.addCleanup(TestActivateInstrumentIntegration.es_cleanup)
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

    # Now create client to DataProductManagementService
    node = self.container.node
    self.rrclient = ResourceRegistryServiceClient(node=node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=node)
    self.pubsubcli = PubsubManagementServiceClient(node=node)
    self.imsclient = InstrumentManagementServiceClient(node=node)
    self.dpclient = DataProductManagementServiceClient(node=node)
    self.datasetclient = DatasetManagementServiceClient(node=node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=node)
    self.dataproductclient = DataProductManagementServiceClient(node=node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=node)
    self.dataset_management = DatasetManagementServiceClient()
    self.usernotificationclient = UserNotificationServiceClient()

    # Set up listener state.
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []

    self.event_publisher = EventPublisher()
def setUp(self):
    """Start the container, cap daily notifications at 10, and create a test
    user with four NotificationRequests covering the delivery modes.

    Fix: the original saved the pre-patch CFG value into an unused local and
    never restored it, so the patched max_daily_notifications leaked into
    every later test in the process. The patch is now reverted via addCleanup.
    """
    self._start_container()

    # patch the CFG service.user_notification.max_daily_notifications value so we only test 10
    original_CFG_max = CFG.get_safe(
        "service.user_notification.max_daily_notifications", 1000)
    CFG['service']['user_notification']['max_daily_notifications'] = 10

    # Restore the original CFG value after the test to avoid cross-test leakage.
    def _restore_cfg_max():
        CFG['service']['user_notification']['max_daily_notifications'] = original_CFG_max
    self.addCleanup(_restore_cfg_max)

    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.object_store = self.container.object_store
    self.resource_registry = self.container.resource_registry
    self.user_notification = UserNotificationServiceClient()
    self.event_publisher = EventPublisher()

    # create UserInfo object (user)
    user = UserInfo()
    user.name = 'Iceman'
    user.contact.email = '*****@*****.**'
    user_id, _ = self.resource_registry.create(user)
    self.user = self.resource_registry.read(user_id)

    # create NotificationRequest objects (notifications)
    # 4 notifications are created:
    #     REAL_TIME, EMAIL(user default via UserInfo)
    #     REAL_TIME, EMAIL(in DeliveryConfiguration)
    #     DISABLED, EMAIL(in DeliveryConfiguration)
    #     REAL_TIME, SMS(in DeliveryConfiguration)

    # REAL_TIME, EMAIL(user default via UserInfo)
    delivery_configuration = IonObject(
        OT.DeliveryConfiguration,
        mode=DeliveryModeEnum.EMAIL,
        frequency=NotificationFrequencyEnum.REAL_TIME)
    notification_request = IonObject(
        OT.NotificationRequest,
        name='REAL_TIME to default UserInfo email',
        type=NotificationTypeEnum.SIMPLE,
        origin='Miramar',
        event_type=OT.ResourceLifecycleEvent,
        delivery_configurations=[delivery_configuration])
    # store this notification_id to check disabled_by_system status later
    self.notification_id = self.user_notification.create_notification(
        notification=notification_request, user_id=self.user._id)

    # REAL_TIME, EMAIL(in DeliveryConfiguration), 10 notifications/day max
    delivery_configuration = IonObject(
        OT.DeliveryConfiguration,
        email='*****@*****.**',
        mode=DeliveryModeEnum.EMAIL,
        frequency=NotificationFrequencyEnum.REAL_TIME)
    notification_request = IonObject(
        OT.NotificationRequest,
        name='REAL_TIME to alternate email, 10 notifications/day max',
        type=NotificationTypeEnum.SIMPLE,
        origin="Miramar",
        event_type=OT.ResourceLifecycleEvent,
        delivery_configurations=[delivery_configuration])
    self.user_notification.create_notification(
        notification=notification_request, user_id=self.user._id)

    # DISABLED, EMAIL(in DeliveryConfiguration)
    delivery_configuration = IonObject(
        OT.DeliveryConfiguration,
        email='*****@*****.**',
        mode=DeliveryModeEnum.EMAIL,
        frequency=NotificationFrequencyEnum.DISABLED)
    notification_request = IonObject(
        OT.NotificationRequest,
        name='DISABLED to alternate email',
        type=NotificationTypeEnum.SIMPLE,
        origin="Miramar",
        event_type=OT.ResourceLifecycleEvent,
        delivery_configurations=[delivery_configuration])
    self.user_notification.create_notification(
        notification=notification_request, user_id=self.user._id)

    # REAL_TIME, SMS(in DeliveryConfiguration)
    delivery_configuration = IonObject(
        OT.DeliveryConfiguration,
        email='*****@*****.**',
        mode=DeliveryModeEnum.SMS,
        frequency=NotificationFrequencyEnum.REAL_TIME)
    notification_request = IonObject(
        OT.NotificationRequest,
        name='SMS to alternate email',
        type=NotificationTypeEnum.SIMPLE,
        origin="Miramar",
        event_type=OT.ResourceLifecycleEvent,
        delivery_configurations=[delivery_configuration])
    self.user_notification.create_notification(
        notification=notification_request, user_id=self.user._id)