def _get_process_definition(self):
    """Resolve the HA process definition from configuration.

    Locates the definition either by configured id (read directly from a
    Process Dispatcher) or by configured name (resource-registry search that
    must match exactly one ProcessDefinition).

    Returns a (process_definition_id, definition) tuple; raises when the
    configuration provides neither identifier or the name is ambiguous.
    """
    definition = None
    process_definition_id = self.CFG.get_safe(
        "highavailability.process_definition_id")
    process_definition_name = self.CFG.get_safe(
        "highavailability.process_definition_name")

    if process_definition_id:
        # Read the definition straight from the first available PD.
        pd_client = ProcessDispatcherServiceClient(to_name=self.pds[0])
        definition = pd_client.read_process_definition(process_definition_id)
    elif process_definition_name:
        # Fall back to a registry lookup by name.
        definitions, _ = self.container.resource_registry.find_resources(
            restype="ProcessDefinition", name=process_definition_name)
        if not definitions:
            raise Exception("Process definition with name '%s' not found"
                            % process_definition_name)
        if len(definitions) > 1:
            raise Exception("multiple process definitions found with name '%s'"
                            % process_definition_name)
        definition = definitions[0]
        process_definition_id = definition._id
    else:
        raise Exception("HA Agent requires either process definition ID or name")

    return process_definition_id, definition
def setUp(self):
    """Boot a container from r2deploy.yml and wire up all service clients
    used by the RSN Vel3D activation tests."""
    super(TestActivateRSNVel3DInstrument, self).setUp()
    config = DotDict()

    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

    # One client per service this suite talks to.
    node = self.container.node
    self.rrclient = ResourceRegistryServiceClient(node=node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=node)
    self.pubsubcli = PubsubManagementServiceClient(node=node)
    self.imsclient = InstrumentManagementServiceClient(node=node)
    self.dpclient = DataProductManagementServiceClient(node=node)
    self.datasetclient = DatasetManagementServiceClient(node=node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=node)
    self.dataproductclient = DataProductManagementServiceClient(node=node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=node)
    self.dataset_management = DatasetManagementServiceClient()
def _get_process_definition(self):
    """Return (process_definition_id, definition) for the HA agent.

    The definition is found either directly by configured id (read from a
    Process Dispatcher) or by configured name (registry lookup that must
    match exactly one ProcessDefinition). Raises when neither identifier is
    configured, the name is not found, or the name is ambiguous.
    """
    process_definition_id = self.CFG.get_safe(
        "highavailability.process_definition_id")
    process_definition_name = self.CFG.get_safe(
        "highavailability.process_definition_name")

    if process_definition_id:
        pd_name = self.pds[0]
        pd = ProcessDispatcherServiceClient(to_name=pd_name)
        definition = pd.read_process_definition(process_definition_id)
        return process_definition_id, definition

    if process_definition_name:
        definitions, _ = self.container.resource_registry.find_resources(
            restype="ProcessDefinition", name=process_definition_name)
        if len(definitions) == 0:
            raise Exception("Process definition with name '%s' not found"
                            % process_definition_name)
        if len(definitions) > 1:
            raise Exception("multiple process definitions found with name '%s'"
                            % process_definition_name)
        definition = definitions[0]
        return definition._id, definition

    raise Exception("HA Agent requires either process definition ID or name")
def run_reverse_transform(self):
    """Run a reverse-transform example and log its input and output.

    Registers a process definition for the ReverseTransform example class,
    executes it over a small sample list via the transform management
    service, and logs both the input and the transform's return value.
    """
    tms_cli = TransformManagementServiceClient(node=self.container.node)
    procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    #-------------------------------
    # Process Definition
    #-------------------------------
    process_definition = IonObject(RT.ProcessDefinition,
                                   name='transform_process_definition')
    process_definition.executable = {
        'module': 'ion.processes.data.transforms.transform_example',
        'class': 'ReverseTransform'
    }
    process_definition_id = procd_cli.create_process_definition(
        process_definition)

    #-------------------------------
    # Execute Transform
    #-------------------------------
    # FIX: name the sample data once and pass the same object to the
    # transform (the original duplicated the literal and shadowed the
    # `input` builtin).
    input_data = [1, 2, 3, 4]
    retval = tms_cli.execute_transform(
        process_definition_id=process_definition_id,
        data=input_data,
        configuration={})

    log.debug('Transform Input: %s', input_data)
    log.debug('Transform Output: %s', retval)
def __init__(self, timeout_spawn):
    """Create the helper with a shared Process Dispatcher client.

    @param timeout_spawn Default timeout in secs for the RUNNING event.
    """
    self._timeout_spawn = timeout_spawn
    # The PD client is shared with the agent launcher.
    self._pd_client = ProcessDispatcherServiceClient()
    self._agent_launcher = AgentLauncher(self._pd_client)
def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container self.container.start_rel_from_url('res/deploy/r2deploy.yml') print 'started services' # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubclient = PubsubManagementServiceClient( node=self.container.node) self.ingestclient = IngestionManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dataproductclient = DataProductManagementServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.dataset_management = self.datasetclient
def setUp(self):
    """Start the container, create the workflow-test service clients, and
    build the SBE37 CDM stream definition."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    node = self.container.node
    self.rrclient = ResourceRegistryServiceClient(node=node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=node)
    self.pubsubclient = PubsubManagementServiceClient(node=node)
    self.ingestclient = IngestionManagementServiceClient(node=node)
    self.imsclient = InstrumentManagementServiceClient(node=node)
    self.dataproductclient = DataProductManagementServiceClient(node=node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=node)
    self.datasetclient = DatasetManagementServiceClient(node=node)
    self.workflowclient = WorkflowManagementServiceClient(node=node)
    self.process_dispatcher = ProcessDispatcherServiceClient(node=node)

    self.ctd_stream_def = SBE37_CDM_stream_definition()
def setUp(self):
    """Start a CEI container, register the TestProcess definition, create a
    process, and subscribe to its lifecycle events."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2cei.yml')

    self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    self.process_definition = IonObject(OT.ProcessDefinition,
                                        name='test_process')
    self.process_definition.executable = {
        'module': 'ion.services.cei.test.test_process_state_gate',
        'class': 'TestProcess'
    }
    self.process_definition_id = self.pd_cli.create_process_definition(
        self.process_definition)

    self.process_schedule = IonObject(OT.ProcessSchedule)
    self.process_schedule.queueing_mode = ProcessQueueingMode.ALWAYS

    self.pid = self.pd_cli.create_process(self.process_definition_id)

    # FIX: the event queue was created twice in the original; a single
    # queue.Queue() is sufficient (the first instance was never used).
    self.event_queue = queue.Queue()

    self.event_sub = EventSubscriber(event_type="ProcessLifecycleEvent",
                                     callback=self._event_callback,
                                     origin=self.pid,
                                     origin_type="DispatchedProcess")
def _register_service(self):
    """Ensure a Service resource exists for this agent's process definition.

    Reuses an existing Service resource when one is registered, otherwise
    creates one, then associates it with its ServiceDefinition if found.
    Returns the service id, or None when registration is not possible.
    """
    if not self.process_definition_id:
        log.error("No process definition id. Not registering service")
        return

    if len(self.pds) < 1:
        log.error("Must have at least one PD available to register a service")
        return

    # Read the definition from the first available Process Dispatcher.
    pd_client = ProcessDispatcherServiceClient(to_name=self.pds[0])
    definition = pd_client.read_process_definition(self.process_definition_id)

    # Prefer an already-registered Service resource of the same name.
    existing_services, _ = self.container.resource_registry.find_resources(
        restype="Service", name=definition.name)
    if existing_services:
        if len(existing_services) > 1:
            log.warning("There is more than one service object for %s. Using the first one" % definition.name)
        service_id = existing_services[0]._id
    else:
        svc_obj = Service(name=definition.name, exchange_name=definition.name)
        service_id, _ = self.container.resource_registry.create(svc_obj)

    # Link the Service to its ServiceDefinition when one exists.
    svcdefs, _ = self.container.resource_registry.find_resources(
        restype="ServiceDefinition", name=definition.name)
    if svcdefs:
        self.container.resource_registry.create_association(
            service_id, "hasServiceDefinition", svcdefs[0]._id)
    else:
        log.error("Cannot find ServiceDefinition resource for %s",
                  definition.name)

    return service_id
def run_reverse_transform(self):
    """Run a reverse-transform example and log its input and output.

    Registers a process definition for the ReverseTransform example class,
    executes it over a small sample list via the transform management
    service, and logs both the input and the transform's return value.
    """
    tms_cli = TransformManagementServiceClient(node=self.container.node)
    procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    #-------------------------------
    # Process Definition
    #-------------------------------
    process_definition = IonObject(RT.ProcessDefinition,
                                   name='transform_process_definition')
    process_definition.executable = {
        'module': 'ion.processes.data.transforms.transform_example',
        'class': 'ReverseTransform'
    }
    process_definition_id = procd_cli.create_process_definition(
        process_definition)

    #-------------------------------
    # Execute Transform
    #-------------------------------
    # FIX: name the sample data once and pass the same object to the
    # transform (the original duplicated the literal and shadowed the
    # `input` builtin).
    input_data = [1, 2, 3, 4]
    retval = tms_cli.execute_transform(
        process_definition_id=process_definition_id,
        data=input_data,
        configuration={})

    log.debug('Transform Input: %s', input_data)
    log.debug('Transform Output: %s', retval)
def setUp(self):
    """Start the container, create the DM service clients, and launch one
    science-granule ingestion worker process."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dpsc_cli = DataProductManagementServiceClient()
    self.rrclient = ResourceRegistryServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.pubsubcli = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.pubsubcli.create_stream_definition(
        name='SBE37_CDM')

    # Register the ingestion-worker process definition.
    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class': 'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.process_dispatcher.create_process_definition(
        process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    #------------------------------------------------------------------------------------------------
    # First launch the ingestors
    #------------------------------------------------------------------------------------------------
    self.exchange_space = 'science_granule_ingestion'
    self.exchange_point = 'science_data'
    config = DotDict()
    config.process.datastore_name = 'datasets'
    config.process.queue_name = self.exchange_space

    self.exchange_names.append(self.exchange_space)
    self.exchange_points.append(self.exchange_point)

    pid = self.process_dispatcher.schedule_process(
        self.process_definitions['ingestion_worker'], configuration=config)
    log.debug("the ingestion worker process id: %s", pid)
    self.pids.append(pid)
def upload_qc():
    """HTTP endpoint: accept a QC file upload and kick off its processing.

    Saves the uploaded file, registers a FileUploadContext document in the
    object store, then creates and schedules an UploadQcProcessing process
    to consume it. Returns a gateway JSON response carrying the context id,
    or an error response on any failure (including a missing/empty upload).
    """
    upload_folder = FileSystem.get_url(FS.TEMP, 'uploads')
    try:
        object_store = Container.instance.object_store

        # required fields
        upload = request.files['file']  # <input type=file name="file">
        if not upload:
            raise BadRequest('Invalid Upload')

        # Save the file under a sanitized name (werkzeug.secure_filename).
        filename = secure_filename(upload.filename)
        path = os.path.join(upload_folder, filename)
        upload_time = time.time()
        upload.save(path)
        filetype = _check_magic(upload) or 'CSV'  # ZIP or CSV, probably

        # Register the upload in the object store.
        file_upload_context = {
            'name': 'User uploaded QC file %s' % filename,
            'filename': filename,
            'filetype': filetype,
            'path': path,
            'upload_time': upload_time,
            'status': 'File uploaded to server'
        }
        fuc_id, _ = object_store.create_doc(file_upload_context)

        # Define, create, and schedule the QC processing process.
        pd_client = ProcessDispatcherServiceClient()
        process_definition = ProcessDefinition(
            name='upload_qc_processor',
            executable={
                'module': 'ion.processes.data.upload.upload_qc_processing',
                'class': 'UploadQcProcessing'
            })
        process_definition_id = pd_client.create_process_definition(
            process_definition)
        process_id = pd_client.create_process(process_definition_id)
        config = DotDict()
        config.process.fuc_id = fuc_id
        pid = pd_client.schedule_process(process_definition_id,
                                         process_id=process_id,
                                         configuration=config)
        log.info('UploadQcProcessing process created %s' % pid)

        # Only the FileUploadContext id is needed by the UX.
        return gateway_json_response({'fuc_id': fuc_id})
    except Exception as e:
        return build_error_response(e)
def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataAcquisitionManagementService self.client = DataAcquisitionManagementServiceClient(node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsub_client = PubsubManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.data_retriever = DataRetrieverServiceClient(node=self.container.node) self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name) # Data async and subscription TODO: Replace with new subscriber self._finished_count = None #TODO: Switch to gevent.queue.Queue self._async_finished_result = AsyncResult() self._finished_events_received = [] self._finished_event_subscriber = None self._start_finished_event_subscriber() self.addCleanup(self._stop_finished_event_subscriber) self.DVR_CONFIG = {} self.DVR_CONFIG = { 'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler', 'dvr_cls' : 'SlocumDataHandler', } self._setup_resources() self.agent_config = { 'driver_config' : self.DVR_CONFIG, 'stream_config' : {}, 'agent' : {'resource_id': self.EDA_RESOURCE_ID}, 'test_mode' : True } datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance, name='ExternalDatasetAgentInstance1', description='external data agent instance', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS, dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=self.agent_config ) self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(external_dataset_agent_instance=datasetagent_instance_obj, external_dataset_agent_id=self.datasetagent_id, 
external_dataset_id=self.EDA_RESOURCE_ID) #TG: Setup/configure the granule logger to log granules as they're published pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id) dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id) print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj # Start a resource agent client to talk with the instrument agent. self._ia_client = ResourceAgentClient('datasetagentclient', name=pid, process=FakeProcess()) log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))
def on_start(self): self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe( 'server.smtp.sender') # Create an event processor self.event_processor = EmailEventProcessor() # Dictionaries that maintain information asetting_up_smtp_clientbout users and their subscribed notifications self.user_info = {} # The reverse_user_info is calculated from the user_info dictionary self.reverse_user_info = {} # Get the clients # @TODO: Why are these not dependencies in the service YML??? self.discovery = DiscoveryServiceClient() self.process_dispatcher = ProcessDispatcherServiceClient() self.event_publisher = EventPublisher() self.datastore = self.container.datastore_manager.get_datastore( 'events') self.start_time = get_ion_ts() #------------------------------------------------------------------------------------ # Create an event subscriber for Reload User Info events #------------------------------------------------------------------------------------ def reload_user_info(event_msg, headers): """ Callback method for the subscriber to ReloadUserInfoEvent """ notification_id = event_msg.notification_id log.debug( "(UNS instance received a ReloadNotificationEvent. The relevant notification_id is %s" % notification_id) try: self.user_info = self.load_user_info() except NotFound: log.warning("ElasticSearch has not yet loaded the user_index.") self.reverse_user_info = calculate_reverse_user_info( self.user_info) log.debug("(UNS instance) After a reload, the user_info: %s" % self.user_info) log.debug("(UNS instance) The recalculated reverse_user_info: %s" % self.reverse_user_info) # the subscriber for the ReloadUSerInfoEvent self.reload_user_info_subscriber = EventSubscriber( event_type=OT.ReloadUserInfoEvent, origin='UserNotificationService', callback=reload_user_info) self.add_endpoint(self.reload_user_info_subscriber)
def upload_qc():
    """HTTP endpoint: accept a QC file upload and kick off its processing.

    Saves the uploaded file, registers a FileUploadContext document in the
    object store, then creates and schedules an UploadQcProcessing process
    to consume it. Returns a gateway JSON response carrying the context id,
    or an error response on any failure (including a missing/empty upload).
    """
    upload_folder = FileSystem.get_url(FS.TEMP, 'uploads')
    try:
        object_store = Container.instance.object_store

        # required fields
        upload = request.files['file']  # <input type=file name="file">
        if not upload:
            raise BadRequest('Invalid Upload')

        # Save the file under a sanitized name (werkzeug.secure_filename).
        filename = secure_filename(upload.filename)
        path = os.path.join(upload_folder, filename)
        upload_time = time.time()
        upload.save(path)
        filetype = _check_magic(upload) or 'CSV'  # ZIP or CSV, probably

        # Register the upload in the object store.
        file_upload_context = {
            'name': 'User uploaded QC file %s' % filename,
            'filename': filename,
            'filetype': filetype,
            'path': path,
            'upload_time': upload_time,
            'status': 'File uploaded to server'
        }
        fuc_id, _ = object_store.create_doc(file_upload_context)

        # Define, create, and schedule the QC processing process.
        pd_client = ProcessDispatcherServiceClient()
        process_definition = ProcessDefinition(
            name='upload_qc_processor',
            executable={
                'module': 'ion.processes.data.upload.upload_qc_processing',
                'class': 'UploadQcProcessing'
            })
        process_definition_id = pd_client.create_process_definition(
            process_definition)
        process_id = pd_client.create_process(process_definition_id)
        config = DotDict()
        config.process.fuc_id = fuc_id
        pid = pd_client.schedule_process(process_definition_id,
                                         process_id=process_id,
                                         configuration=config)
        log.info('UploadQcProcessing process created %s' % pid)

        # Only the FileUploadContext id is needed by the UX.
        return gateway_json_response({'fuc_id': fuc_id})
    except Exception as e:
        return build_error_response(e)
def setUp(self):
    """Start a CEI container, register a TestProcess definition, and spawn a
    HighAvailabilityAgent (with dashi messaging) plus a client for it."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2cei.yml')

    #self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)
    self.pd_cli = ProcessDispatcherServiceClient(to_name="process_dispatcher")

    # Register the process definition the HA agent will manage.
    self.process_definition_id = uuid4().hex
    self.process_definition_name = 'test'
    self.process_definition = ProcessDefinition(
        name=self.process_definition_name,
        executable={'module': 'ion.agents.cei.test.test_haagent',
                    'class': 'TestProcess'})
    self.pd_cli.create_process_definition(self.process_definition,
                                          self.process_definition_id)

    self.resource_id = "haagent_1234"
    self._haa_name = "high_availability_agent"
    self._haa_dashi_name = "dashi_haa_" + uuid4().hex
    self._haa_dashi_uri = get_dashi_uri_from_cfg()
    self._haa_dashi_exchange = "%s.hatests" % bootstrap.get_sys_name()
    self._haa_config = {
        'highavailability': {
            'policy': {
                'interval': 1,
                'name': 'npreserving',
                'parameters': {
                    'preserve_n': 0
                }
            },
            'process_definition_id': self.process_definition_id,
            'dashi_messaging': True,
            'dashi_exchange': self._haa_dashi_exchange,
            'dashi_name': self._haa_dashi_name
        },
        'agent': {'resource_id': self.resource_id},
    }

    # Snapshot pre-existing services/processes for later comparison.
    self._base_services, _ = self.container.resource_registry.find_resources(
        restype="Service", name=self.process_definition_name)
    self._base_procs = self.pd_cli.list_processes()

    self.waiter = ProcessStateWaiter()
    self.waiter.start()

    self.container_client = ContainerAgentClient(node=self.container.node,
                                                 name=self.container.name)
    self._haa_pid = self.container_client.spawn_process(
        name=self._haa_name,
        module="ion.agents.cei.high_availability_agent",
        cls="HighAvailabilityAgent",
        config=self._haa_config)

    # Start a resource agent client to talk with the instrument agent.
    self._haa_pyon_client = SimpleResourceAgentClient(self.resource_id,
                                                     process=FakeProcess())
    log.info('Got haa client %s.', str(self._haa_pyon_client))
    self.haa_client = HighAvailabilityAgentClient(self._haa_pyon_client)
def __init__(self, use_gate=True):
    """Create the helper with a Process Dispatcher client.

    @param use_gate True (the default) to use ProcessStateGate pattern.
        Otherwise, use the
        "create_process/subscribe-to-event/schedule_process/_await_state_event"
        pattern (as described in
        https://confluence.oceanobservatories.org/display/CIDev/R2+Process+Dispatcher+Guide
        as of Sept 14/12).
    """
    self._pd_client = ProcessDispatcherServiceClient()
    self._use_gate = use_gate
def setUp(self):
    """Start the container, create DM service clients, then build the test's
    parameter contexts and resources."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.pubsub_management = PubsubManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.process_dispatch_client = ProcessDispatcherServiceClient(
        node=self.container.node)
    self.resource_registry = self.container.resource_registry

    self.context_ids = self.build_param_contexts()
    self.setup_resources()
def begin(self):
    """Plugin hook: bootstrap pyon if needed and connect a PD client."""
    from interface.services.cei.iprocess_dispatcher_service import ProcessDispatcherServiceClient
    from pyon.net.messaging import make_node
    from pyon.core import bootstrap
    from pyon.public import CFG

    self.base_pids = []
    self.rpc_timeout = 2
    self._procs_by_test = {}

    # Bootstrap pyon only once per test run.
    if not bootstrap.pyon_initialized:
        bootstrap.bootstrap_pyon()

    self.node, self.ioloop = make_node()
    self.node.setup_interceptors(CFG.interceptor)
    self.pd_cli = ProcessDispatcherServiceClient(node=self.node)
def setUp(self):
    """Start the container and wire up the service clients used by the tests."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    node = self.container.node
    self.rrclient = ResourceRegistryServiceClient(node=node)
    self.pubsubclient = PubsubManagementServiceClient(node=node)
    self.dpclient = DataProductManagementServiceClient(node=node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=node)
    self.dataproductclient = DataProductManagementServiceClient(node=node)
    self.dataset_management = DatasetManagementServiceClient()
def run_external_transform(self):
    """Demonstrate a transform driven by an external OS process.

    Launches the ExternalTransform example (which shells out to the OS
    command 'bc' to add 1 to its input), wires it to an input stream via a
    subscription, then spawns and starts a producer that feeds that stream:

        Producer -> A -> 'FS.TEMP/transform_output'

    where A is the external transform.
    """
    pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
    tms_cli = TransformManagementServiceClient(node=self.container.node)
    procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    # Process definition for the external transform.
    process_definition = ProcessDefinition(
        name='external_transform_definition')
    process_definition.executable['module'] = 'ion.processes.data.transforms.transform_example'
    process_definition.executable['class'] = 'ExternalTransform'
    process_definition_id = procd_cli.create_process_definition(
        process_definition=process_definition)

    # Input stream and the subscription the transform consumes from.
    input_stream_id = pubsub_cli.create_stream(name='input_stream',
                                               original=True)
    query = StreamQuery(stream_ids=[input_stream_id])
    input_subscription_id = pubsub_cli.create_subscription(
        query=query, exchange_name='input_queue')

    # Launch and activate the transform.
    transform_id = tms_cli.create_transform(
        name='external_transform',
        in_subscription_id=input_subscription_id,
        process_definition_id=process_definition_id,
        configuration={})
    tms_cli.activate_transform(transform_id)

    # Launch the producer that publishes onto the input stream.
    id_p = self.container.spawn_process(
        'myproducer',
        'ion.processes.data.transforms.transform_example',
        'TransformExampleProducer',
        {'process': {'type': 'stream_process',
                     'publish_streams': {'out_stream': input_stream_id}},
         'stream_producer': {'interval': 4000}})
    self.container.proc_manager.procs[id_p].start()
def setUp(self):
    """Start a DM container and derive the science-data XS/XP names from CFG."""
    self._start_container()
    self.datastore_name = CACHE_DATASTORE_NAME
    self.container.start_rel_from_url('res/deploy/r2dm.yml')
    self.db = self.container.datastore_manager.get_datastore(
        self.datastore_name, DataStore.DS_PROFILE.SCIDATA)

    self.tms_cli = TransformManagementServiceClient()
    self.pubsub_cli = PubsubManagementServiceClient()
    self.pd_cli = ProcessDispatcherServiceClient()
    self.rr_cli = ResourceRegistryServiceClient()

    # core_xps.science_data must look like "<xs>.<xp>".
    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        self.XP = '.'.join([bootstrap.get_sys_name(), xp_base])
    except ValueError:
        raise StandardError('Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure' % xs_dot_xp)
def __init__(self, pd_name, resource_registry, service_id, callback=None,
             logprefix=""):
    """Track processes dispatched through one Process Dispatcher.

    Stores the PD identity and registry handles, validates the optional
    callback, creates a PD client, and prepares (without starting) an
    auto-deleting subscriber for DispatchedProcess lifecycle events.
    """
    self.pd_name = pd_name
    self.resource_registry = resource_registry
    self.service_id = service_id
    self.callback = callback
    # Reject a non-callable callback up front.
    if callback and not callable(callback):
        raise ValueError("callback is not callable")
    self.logprefix = logprefix

    self.client = ProcessDispatcherServiceClient(to_name=pd_name)
    self.event_sub = EventSubscriber(event_type="ProcessLifecycleEvent",
                                     callback=self._event_callback,
                                     origin_type="DispatchedProcess",
                                     auto_delete=True)
    # pid -> latest known process record.
    self.processes = {}
def setUp(self):
    """Boot a container and create the DM/CEI clients for event tests."""
    super(EventManagementIntTest, self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.event_management = EventManagementServiceClient()
    self.rrc = ResourceRegistryServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.pubsub = PubsubManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()

    # Queues/exchanges created during a test, to be torn down afterwards.
    self.queue_cleanup = []
    self.exchange_cleanup = []
def setUp(self): # Start container self._start_container() #self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.container.start_rel_from_url('res/deploy/r2deploy.yml') print 'started services' # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() # deactivate all data processes when tests are complete def killAllDataProcesses(): for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]: self.dataprocessclient.deactivate_data_process(proc_id) self.dataprocessclient.delete_data_process(proc_id) self.addCleanup(killAllDataProcesses)
def setUp(self):
    """Start a fake dashi PD consumer, then a container bridged to it, and
    register a basic transform process definition."""
    # set up a fake dashi consumer to act as the PD
    try:
        import dashi
    except ImportError:
        raise SkipTest("Process Dispatcher Bridge integration test "
                       "requires the dashi library. Skipping.")
    self.fake_pd = FakePD(dashi.DashiConnection(self.dashi_pd_topic,
                                                self.dashi_uri,
                                                self.dashi_exchange))
    self.fake_pd.consume_in_thread()

    # set up the container
    self._start_container()
    self.cc = ContainerAgentClient(node=self.container.node,
                                   name=self.container.name)

    CFG['process_dispatcher_bridge'] = dict(
        uri="memory://local",
        exchange="test_pd_bridge_exchange",
        topic="processdispatcher")

    self.cc.start_rel_from_url('res/deploy/r2cei.yml')

    self.pd_cli = ProcessDispatcherServiceClient(node=self.cc.node)

    self.process_definition = ProcessDefinition(
        name='basic_transform_definition')
    self.process_definition.executable = {
        'module': 'ion.processes.data.transforms.transform_example',
        'class': 'TransformExample'}
    self.process_definition_id = self.pd_cli.create_process_definition(
        self.process_definition)
def setUp(self):
    """Start a CEI container and register the TestProcess definition."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2cei.yml')

    self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    self.process_definition = ProcessDefinition(name='test_process')
    self.process_definition.executable = {
        'module': 'ion.services.cei.test.test_process_dispatcher',
        'class': 'TestProcess'}
    self.process_definition_id = self.pd_cli.create_process_definition(
        self.process_definition)

    self.event_queue = queue.Queue()
    self.event_sub = None
def setUp(self):
    """Boot a container from r2deploy.yml and create the RSN test clients."""
    super(TestRSNIntegration, self).setUp()
    config = DotDict()
    #config.bootstrap.use_es = True

    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

    node = self.container.node
    self.rrclient = ResourceRegistryServiceClient(node=node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=node)
    self.pubsubclient = PubsubManagementServiceClient(node=node)
    self.imsclient = InstrumentManagementServiceClient(node=node)
    self.dpclient = DataProductManagementServiceClient(node=node)
    self.datasetclient = DatasetManagementServiceClient(node=node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=node)
    self.dataproductclient = DataProductManagementServiceClient(node=node)
    self.dataset_management = DatasetManagementServiceClient()

    # Alerts raised during a test are collected here.
    self.catch_alert = gevent.queue.Queue()
def setUp(self):
    """Start the container and create pubsub/PD/dataset clients for the CTD
    transform tests."""
    super(CtdTransformsIntTest, self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Queues/exchanges created during a test, torn down afterwards.
    self.queue_cleanup = []
    self.exchange_cleanup = []

    self.pubsub = PubsubManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()

    self.exchange_name = 'ctd_L0_all_queue'
    self.exchange_point = 'test_exchange'
    self.i = 0
def setUp(self):
    """Start the container, create service clients, and register a cleanup
    that deactivates and deletes every data process."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    node = self.container.node
    self.rrclient = ResourceRegistryServiceClient(node=node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=node)
    self.pubsubclient = PubsubManagementServiceClient(node=node)
    self.ingestclient = IngestionManagementServiceClient(node=node)
    self.dpmsclient = DataProductManagementServiceClient(node=node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=node)
    self.imsclient = InstrumentManagementServiceClient(node=node)
    self.omsclient = ObservatoryManagementServiceClient(node=node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(
                RT.DataProcess, None, None, True)[0]:
            self.dataprocessclient.deactivate_data_process(proc_id)
            self.dataprocessclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    """Start a container, build service clients, and init sample-listener state."""
    # Start container
    super(TestActivateInstrumentIntegration, self).setUp()
    config = DotDict()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
    self.usernotificationclient = UserNotificationServiceClient()

    # setup listener vars
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []

    self.event_publisher = EventPublisher()
def setUp(self):
    """Start a container, build service clients, and init sample-listener state."""
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    # setup listener vars
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []

    self.event_publisher = EventPublisher()
def setUp(self):
    """Start a container with the DM rel, create a SCIDATA datastore and clients."""
    super(DataRetrieverServiceIntTest, self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2dm.yml')

    self.couch = self.container.datastore_manager.get_datastore('test_data_retriever', profile=DataStore.DS_PROFILE.SCIDATA)
    self.datastore_name = 'test_data_retriever'

    self.dr_cli = DataRetrieverServiceClient(node=self.container.node)
    self.dsm_cli = DatasetManagementServiceClient(node=self.container.node)
    self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
    self.ps_cli = PubsubManagementServiceClient(node=self.container.node)
    self.tms_cli = TransformManagementServiceClient(node=self.container.node)
    self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    # CFG value must look like "<exchange_space>.<exchange_point>"
    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        self.XP = '.'.join([bootstrap.get_sys_name(), xp_base])
    except ValueError:
        raise StandardError('Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure' % xs_dot_xp)

    self.thread_pool = list()
def setUp(self):
    """Start a CEI container and register the test process definition with the PD."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2cei.yml')

    self.rr_cli = ResourceRegistryServiceClient()
    self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    self.process_definition = ProcessDefinition(name='test_process')
    self.process_definition.executable = {
        'module': 'ion.services.cei.test.test_process_dispatcher',
        'class': 'TestProcess'
    }
    self.process_definition_id = self.pd_cli.create_process_definition(self.process_definition)

    # helper that blocks until a process reaches an expected state
    self.waiter = ProcessStateWaiter()
def setUp(self):
    """Start a DM container and create the streams/subscription for transform tests."""
    # set up the container
    self._start_container()
    self.container.start_rel_from_url("res/deploy/r2dm.yml")

    self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
    self.tms_cli = TransformManagementServiceClient(node=self.container.node)
    self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
    self.procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    # input stream feeding the transform under test
    self.input_stream_id = self.pubsub_cli.create_stream(name="input_stream", original=True)

    self.input_subscription_id = self.pubsub_cli.create_subscription(
        query=StreamQuery(stream_ids=[self.input_stream_id]),
        exchange_name="transform_input",
        name="input_subscription",
    )

    # stream the transform publishes its results to
    self.output_stream_id = self.pubsub_cli.create_stream(name="output_stream", original=True)

    self.process_definition = ProcessDefinition(name="basic_transform_definition")
    self.process_definition.executable = {
        "module": "ion.processes.data.transforms.transform_example",
        "class": "TransformExample",
    }
    self.process_definition_id = self.procd_cli.create_process_definition(
        process_definition=self.process_definition
    )
def setUp(self):
    """Start a container, launch the PD app, boot an EE agent node, and wire dashi."""
    self.dashi = None
    self._start_container()
    self.container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
    self.container = self.container_client._get_container_instance()

    # start the Process Dispatcher service in-container with test config
    app = dict(name="process_dispatcher", processapp=("process_dispatcher", "ion.services.cei.process_dispatcher_service", "ProcessDispatcherService"))
    self.container.start_app(app, config=pd_config)

    self.rr_cli = self.container.resource_registry
    self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    self.process_definition = ProcessDefinition(name='test_process')
    self.process_definition.executable = {'module': 'ion.services.cei.test.test_process_dispatcher', 'class': 'TestProcess'}
    self.process_definition_id = self.pd_cli.create_process_definition(self.process_definition)

    # eeagent pids / temp dirs created during the test, for teardown
    self._eea_pids = []
    self._tmpdirs = []

    self.dashi = get_dashi(uuid.uuid4().hex,
        pd_config['processdispatcher']['dashi_uri'],
        pd_config['processdispatcher']['dashi_exchange'])

    #send a fake node_state message to PD's dashi binding.
    self.node1_id = uuid.uuid4().hex
    self._send_node_state("engine1", self.node1_id)
    self._start_eeagent(self.node1_id)

    self.waiter = ProcessStateWaiter()
def setUp(self):
    """Start a container and build the clients used by transform-worker tests."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Instantiate a process to represent the test
    process = TransformWorkerTestProcess()

    self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    # process client needs the test process for its messaging context
    self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)

    self.time_dom, self.spatial_dom = time_series_domain()

    self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)

    self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
def on_start(self):
    """Initialize the notification service: event repo, SMTP, processor, clients."""
    #---------------------------------------------------------------------------------------------------
    # Get the event Repository
    #---------------------------------------------------------------------------------------------------
    self.event_repo = self.container.instance.event_repository

    self.smtp_client = setting_up_smtp_client()

    self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'

    #---------------------------------------------------------------------------------------------------
    # Create an event processor
    #---------------------------------------------------------------------------------------------------
    self.event_processor = EmailEventProcessor(self.smtp_client)

    #---------------------------------------------------------------------------------------------------
    # load event originators, types, and table
    #---------------------------------------------------------------------------------------------------
    self.notifications = {}

    #---------------------------------------------------------------------------------------------------
    # Get the clients
    #---------------------------------------------------------------------------------------------------
    self.discovery = DiscoveryServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.event_publisher = EventPublisher()

    # service start time, as epoch time (used to bound event queries)
    self.start_time = UserNotificationService.makeEpochTime(self.__now())
def __init__(self, timeout_spawn):
    """
    Create the Process Dispatcher client and the agent launcher built on it.

    @param timeout_spawn Default timeout in secs for the RUNNING event.
    """
    pd_client = ProcessDispatcherServiceClient()
    self._timeout_spawn = timeout_spawn
    self._pd_client = pd_client
    self._agent_launcher = AgentLauncher(pd_client)
def on_start(self):
    """Initialize the service: event repo, SMTP, event tables, and client handles."""
    #---------------------------------------------------------------------------------------------------
    # Get the event Repository
    #---------------------------------------------------------------------------------------------------
    self.event_repo = self.container.instance.event_repository

    self.smtp_client = setting_up_smtp_client()

    self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'

    #---------------------------------------------------------------------------------------------------
    # Create an event processor
    #---------------------------------------------------------------------------------------------------
    self.event_processor = EmailEventProcessor(self.smtp_client)

    #---------------------------------------------------------------------------------------------------
    # load event originators, types, and table
    #---------------------------------------------------------------------------------------------------
    self.event_types = CFG.event.types
    self.event_table = {}

    #---------------------------------------------------------------------------------------------------
    # Get the clients
    #---------------------------------------------------------------------------------------------------
    self.discovery = DiscoveryServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.datastore_manager = DatastoreManager()

    self.event_publisher = EventPublisher()
    self.scheduler_service = SchedulerService()
def setUp(self):
    """Start a container, build clients, preload the logical-transform definition,
    and register cleanup that removes all data processes."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()

    # create missing data process definition (normally provided by preload)
    dpd_obj = IonObject(RT.DataProcessDefinition,
                        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                        description="normally in preload",
                        module='ion.processes.data.transforms.logical_transform',
                        class_name='logical_transform')
    self.dataprocessclient.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
            self.dataprocessclient.deactivate_data_process(proc_id)
            self.dataprocessclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def on_start(self):
    """Spawn last-update cache workers: one transform per configured worker,
    all subscribed to the science-data exchange."""
    super(CacheLauncher, self).on_start()
    tms_cli = TransformManagementServiceClient()
    pubsub_cli = PubsubManagementServiceClient()
    pd_cli = ProcessDispatcherServiceClient()
    dname = CACHE_DATASTORE_NAME
    number_of_workers = self.CFG.get_safe('process.number_of_workers', 2)

    proc_def = ProcessDefinition(
        name='last_update_worker_process',
        description=
        'Worker process for caching the last update from a stream')
    proc_def.executable['module'] = 'ion.processes.data.last_update_cache'
    proc_def.executable['class'] = 'LastUpdateCache'
    proc_def_id = pd_cli.create_process_definition(
        process_definition=proc_def)

    # CFG value must look like "<exchange_space>.<exchange_point>"
    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        self.XP = '.'.join([get_sys_name(), xp_base])
    except ValueError:
        raise StandardError(
            'Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure'
            % xs_dot_xp)

    # one subscription over the whole exchange, shared by all workers
    subscription_id = pubsub_cli.create_subscription(
        query=ExchangeQuery(), exchange_name='last_update_cache')
    config = {
        'couch_storage': {
            'datastore_name': dname,
            'datastore_profile': 'SCIDATA'
        }
    }
    for i in xrange(number_of_workers):
        transform_id = tms_cli.create_transform(
            name='last_update_cache%d' % i,
            description=
            'last_update that compiles an aggregate of metadata',
            in_subscription_id=subscription_id,
            process_definition_id=proc_def_id,
            configuration=config)
        tms_cli.activate_transform(transform_id=transform_id)
def create_process(name='', module='', class_name='', configuration=None):
    """
    Helper that registers a process definition with the Process Dispatcher
    and schedules a process from it.

    @param name           Name for the process definition.
    @param module         Dotted module path containing the process class.
    @param class_name     Name of the process class within the module.
    @param configuration  Optional configuration passed to the scheduled process.
    @retval               pid of the scheduled process.
    """
    dispatcher = ProcessDispatcherServiceClient()

    definition = ProcessDefinition(name=name)
    definition.executable = {
        'module': module,
        'class': class_name
    }
    definition_id = dispatcher.create_process_definition(process_definition=definition)

    return dispatcher.schedule_process(process_definition_id=definition_id,
                                       configuration=configuration)
def setUp(self):
    """Start a container, wire the service clients, launch the external dataset
    agent instance, and connect a resource-agent client to it.

    Fixes: drops a redundant ``self.DVR_CONFIG = {}`` that was immediately
    overwritten, and replaces a bare ``print`` with ``log.debug`` to match the
    logging used on the adjacent statements.
    """
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataAcquisitionManagementService
    self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.data_retriever = DataRetrieverServiceClient(node=self.container.node)
    self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)

    # Data async and subscription TODO: Replace with new subscriber
    self._finished_count = None
    #TODO: Switch to gevent.queue.Queue
    self._async_finished_result = AsyncResult()
    self._finished_events_received = []
    self._finished_event_subscriber = None
    self._start_finished_event_subscriber()
    self.addCleanup(self._stop_finished_event_subscriber)

    # driver config for the Slocum data handler
    self.DVR_CONFIG = {
        'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler',
        'dvr_cls' : 'SlocumDataHandler',
    }

    self._setup_resources()

    self.agent_config = {
        'driver_config' : self.DVR_CONFIG,
        'stream_config' : {},
        'agent'         : {'resource_id': self.EDA_RESOURCE_ID},
        'test_mode'     : True
    }

    datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                                          name='ExternalDatasetAgentInstance1',
                                          description='external data agent instance',
                                          handler_module=self.EDA_MOD,
                                          handler_class=self.EDA_CLS,
                                          dataset_driver_config=self.DVR_CONFIG,
                                          dataset_agent_config=self.agent_config)
    self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(
        external_dataset_agent_instance=datasetagent_instance_obj,
        external_dataset_agent_id=self.datasetagent_id,
        external_dataset_id=self.EDA_RESOURCE_ID)

    #TG: Setup/configure the granule logger to log granules as they're published
    pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id)

    dataset_agent_instance_obj = self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id)
    # use the module logger instead of a bare print so output honors log config
    log.debug('TestBulkIngest: Dataset agent instance obj: = %s', dataset_agent_instance_obj)

    # Start a resource agent client to talk with the instrument agent.
    self._ia_client = ResourceAgentClient('datasetagentclient', name=pid, process=FakeProcess())
    log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))
def setUp(self):
    """Start a container, build clients, and launch an ingestion worker whose
    pid/queues are tracked for cleanup."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class' :'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    # pids and exchange names/points created here, consumed by cleanup
    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    #------------------------------------------------------------------------------------------------
    # First launch the ingestors
    #------------------------------------------------------------------------------------------------
    self.exchange_space = 'science_granule_ingestion'
    self.exchange_point = 'science_data'
    config = DotDict()
    config.process.datastore_name = 'datasets'
    config.process.queue_name = self.exchange_space

    self.exchange_names.append(self.exchange_space)
    self.exchange_points.append(self.exchange_point)

    pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
    log.debug("the ingestion worker process id: %s", pid)
    self.pids.append(pid)

    self.addCleanup(self.cleaning_up)
def setUp(self):
    """Start a CEI container, register the test process definition, then spawn the
    HA agent (with dashi messaging) and connect a client to it."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2cei.yml')
    #self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)
    self.pd_cli = ProcessDispatcherServiceClient(to_name="process_dispatcher")

    self.process_definition_id = uuid4().hex
    self.process_definition_name = 'test'
    self.process_definition = ProcessDefinition(name=self.process_definition_name,
                                                executable={
                                                    'module': 'ion.agents.cei.test.test_haagent',
                                                    'class': 'TestProcess'
                                                })
    self.pd_cli.create_process_definition(self.process_definition, self.process_definition_id)

    self.resource_id = "haagent_1234"
    self._haa_name = "high_availability_agent"
    self._haa_dashi_name = "dashi_haa_" + uuid4().hex
    self._haa_dashi_uri = get_dashi_uri_from_cfg()
    self._haa_dashi_exchange = "%s.hatests" % bootstrap.get_sys_name()
    # HA agent config: npreserving policy starting with zero preserved processes
    self._haa_config = {
        'highavailability': {
            'policy': {
                'interval': 1,
                'name': 'npreserving',
                'parameters': {
                    'preserve_n': 0
                }
            },
            'process_definition_id': self.process_definition_id,
            'dashi_messaging' : True,
            'dashi_exchange' : self._haa_dashi_exchange,
            'dashi_name': self._haa_dashi_name
        },
        'agent': {'resource_id': self.resource_id},
    }

    # snapshot pre-existing services/processes so tests can diff against them
    self._base_services, _ = self.container.resource_registry.find_resources(
        restype="Service", name=self.process_definition_name)

    self._base_procs = self.pd_cli.list_processes()

    self.waiter = ProcessStateWaiter()
    self.waiter.start()

    self.container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
    self._haa_pid = self.container_client.spawn_process(name=self._haa_name,
                                                        module="ion.agents.cei.high_availability_agent",
                                                        cls="HighAvailabilityAgent",
                                                        config=self._haa_config)

    # Start a resource agent client to talk with the instrument agent.
    self._haa_pyon_client = SimpleResourceAgentClient(self.resource_id, process=FakeProcess())
    log.info('Got haa client %s.', str(self._haa_pyon_client))

    self.haa_client = HighAvailabilityAgentClient(self._haa_pyon_client)
class ProcessDispatcherServiceLocalIntTest(IonIntegrationTestCase):
    """Integration tests for the "local" mode of the PD

    In local mode processes are directly started on the local container. This
    is provided to allow use of PD functionality when launching via a single
    rel file instead of via a "real" CEI launch.
    """

    def setUp(self):
        """Start a CEI container (bridge mode disabled) and register a definition."""
        # ensure bridge mode is disabled
        if 'process_dispatcher_bridge' in CFG:
            del CFG['process_dispatcher_bridge']

        # set up the container
        self._start_container()

        self.cc = ContainerAgentClient(node=self.container.node, name=self.container.name)

        self.cc.start_rel_from_url('res/deploy/r2cei.yml')

        self.pd_cli = ProcessDispatcherServiceClient(node=self.cc.node)

        self.process_definition = ProcessDefinition(name='basic_transform_definition')
        self.process_definition.executable = {'module': 'ion.processes.data.transforms.transform_example',
                                              'class':'TransformExample'}
        self.process_definition_id = self.pd_cli.create_process_definition(self.process_definition)

    def test_schedule_cancel(self):
        """Scheduling spawns the process locally; cancel removes it again."""
        process_schedule = ProcessSchedule()

        pid = self.pd_cli.schedule_process(self.process_definition_id,
            process_schedule, configuration={})

        proc = self.container.proc_manager.procs.get(pid)
        self.assertIsInstance(proc, TransformExample)

        # failures could theoretically leak processes but I don't think that
        # matters since everything gets torn down between tests
        self.pd_cli.cancel_process(pid)

        self.assertNotIn(pid, self.container.proc_manager.procs)
def on_start(self): self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe('server.smtp.sender') # Create an event processor self.event_processor = EmailEventProcessor() # Event originators, types, and table self.notifications = {} # Dictionaries that maintain information about users and their subscribed notifications self.user_info = {} # The reverse_user_info is calculated from the user_info dictionary self.reverse_user_info = {} # Get the clients # @TODO: Why are these not dependencies in the service YML??? self.discovery = DiscoveryServiceClient() self.process_dispatcher = ProcessDispatcherServiceClient() self.event_publisher = EventPublisher() self.datastore = self.container.datastore_manager.get_datastore('events') self.start_time = get_ion_ts() #------------------------------------------------------------------------------------ # Create an event subscriber for Reload User Info events #------------------------------------------------------------------------------------ def reload_user_info(event_msg, headers): ''' Callback method for the subscriber to ReloadUserInfoEvent ''' notification_id = event_msg.notification_id log.debug("(UNS instance received a ReloadNotificationEvent. The relevant notification_id is %s" % notification_id) try: self.user_info = self.load_user_info() except NotFound: log.warning("ElasticSearch has not yet loaded the user_index.") self.reverse_user_info = calculate_reverse_user_info(self.user_info) log.debug("(UNS instance) After a reload, the user_info: %s" % self.user_info) log.debug("(UNS instance) The recalculated reverse_user_info: %s" % self.reverse_user_info) # the subscriber for the ReloadUSerInfoEvent self.reload_user_info_subscriber = EventSubscriber( event_type="ReloadUserInfoEvent", origin='UserNotificationService', callback=reload_user_info ) self.add_endpoint(self.reload_user_info_subscriber)
def setUp(self):
    """Start a container and build the service clients plus the default domains."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
    self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)

    # default temporal/spatial domains for data products created in tests
    self.time_dom, self.spatial_dom = time_series_domain()
def __init__(self, use_gate=True):
    """
    Create the launcher and its Process Dispatcher client.

    @param use_gate True (the default) selects the ProcessStateGate pattern.
           When False, the older
           "create_process/subscribe-to-event/schedule_process/_await_state_event"
           pattern is used instead (described in
           https://confluence.oceanobservatories.org/display/CIDev/R2+Process+Dispatcher+Guide
           as of Sept 14/12).
    """
    self._pd_client = ProcessDispatcherServiceClient()
    self._use_gate = use_gate
def on_start(self):
    """Spawn last-update cache workers: one transform per configured worker,
    all sharing a single exchange-wide subscription."""
    super(CacheLauncher,self).on_start()
    tms_cli = TransformManagementServiceClient()
    pubsub_cli = PubsubManagementServiceClient()
    pd_cli = ProcessDispatcherServiceClient()
    dname = CACHE_DATASTORE_NAME
    number_of_workers = self.CFG.get_safe('process.number_of_workers', 2)

    proc_def = ProcessDefinition(name='last_update_worker_process',description='Worker process for caching the last update from a stream')
    proc_def.executable['module'] = 'ion.processes.data.last_update_cache'
    proc_def.executable['class'] = 'LastUpdateCache'
    proc_def_id = pd_cli.create_process_definition(process_definition=proc_def)

    # CFG value must look like "<exchange_space>.<exchange_point>"
    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        self.XP = '.'.join([get_sys_name(), xp_base])
    except ValueError:
        raise StandardError('Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure' % xs_dot_xp)

    subscription_id = pubsub_cli.create_subscription(query=ExchangeQuery(), exchange_name='last_update_cache')

    config = {
        'couch_storage' : {
            'datastore_name' : dname,
            'datastore_profile' : 'SCIDATA'
        }
    }

    for i in xrange(number_of_workers):
        transform_id = tms_cli.create_transform(
            name='last_update_cache%d' % i,
            description='last_update that compiles an aggregate of metadata',
            in_subscription_id=subscription_id,
            process_definition_id=proc_def_id,
            configuration=config
        )
        tms_cli.activate_transform(transform_id=transform_id)
def setUp(self):
    """Start a CEI container and register the test process definition with the PD."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2cei.yml')

    self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    self.process_definition = ProcessDefinition(name='test_process')
    self.process_definition.executable = {'module': 'ion.services.cei.test.test_process_dispatcher',
                                          'class': 'TestProcess'}
    self.process_definition_id = self.pd_cli.create_process_definition(self.process_definition)

    # helper that blocks until a process reaches an expected state
    self.waiter = ProcessStateWaiter()
def setUp(self):
    """Start a container, launch the PD app, fake a node-state dashi message,
    and spawn an Execution Engine agent to test against."""
    self._start_container()
    self.container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
    self.container = self.container_client._get_container_instance()

    # start the Process Dispatcher service in-container with test config
    app = dict(processapp=("process_dispatcher", "ion.services.cei.process_dispatcher_service", "ProcessDispatcherService"))
    self.container.start_app(app, config=pd_config)

    self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    self.process_definition = ProcessDefinition(name='test_process')
    self.process_definition.executable = {'module': 'ion.services.cei.test.test_process_dispatcher',
                                          'class': 'TestProcess'}
    self.process_definition_id = self.pd_cli.create_process_definition(self.process_definition)

    self.event_queue = queue.Queue()

    self.event_sub = None

    self.resource_id = "eeagent_123456789"
    self._eea_name = "eeagent"

    # pyon launch type persists process state under a temp directory
    self.persistence_directory = tempfile.mkdtemp()

    self.agent_config = {
        'eeagent': {
            'heartbeat': 1,
            'heartbeat_queue': 'hbeatq',
            'slots': 100,
            'name': 'pyon_eeagent',
            'node_id': 'somenodeid',
            'launch_type': {
                'name': 'pyon',
                'persistence_directory': self.persistence_directory,
            },
        },
        'agent': {'resource_id': self.resource_id},
    }

    #send a fake dt_state message to PD's dashi binding.
    dashi = get_dashi(uuid.uuid4().hex,
        pd_config['processdispatcher']['dashi_uri'],
        pd_config['processdispatcher']['dashi_exchange'])
    dt_state = dict(node_id="somenodeid", state=InstanceState.RUNNING,
        deployable_type="eeagent_pyon")
    dashi.fire(get_pd_dashi_name(), "dt_state", args=dt_state)

    self._eea_pid = self.container_client.spawn_process(name=self._eea_name,
        module="ion.agents.cei.execution_engine_agent",
        cls="ExecutionEngineAgent", config=self.agent_config)
    log.info('Agent pid=%s.', str(self._eea_pid))
def setUp(self):
    """Start a CEI container, register the test process definition, then spawn
    the HA agent (with dashi messaging) and connect a client to it."""
    self._start_container()
    self.container.start_rel_from_url("res/deploy/r2cei.yml")
    # self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)
    self.pd_cli = ProcessDispatcherServiceClient(to_name="process_dispatcher")

    self.process_definition_id = uuid4().hex
    self.process_definition_name = "test"
    self.process_definition = ProcessDefinition(
        name=self.process_definition_name,
        executable={"module": "ion.agents.cei.test.test_haagent", "class": "TestProcess"},
    )
    self.pd_cli.create_process_definition(self.process_definition, self.process_definition_id)

    self.resource_id = "haagent_1234"
    self._haa_name = "high_availability_agent"
    self._haa_dashi_name = "dashi_haa_" + uuid4().hex
    self._haa_dashi_uri = get_dashi_uri_from_cfg()
    self._haa_dashi_exchange = "%s.hatests" % bootstrap.get_sys_name()
    # HA agent config: npreserving policy starting with zero preserved processes
    self._haa_config = {
        "highavailability": {
            "policy": {"interval": 1, "name": "npreserving", "parameters": {"preserve_n": 0}},
            "process_definition_id": self.process_definition_id,
            "dashi_messaging": True,
            "dashi_exchange": self._haa_dashi_exchange,
            "dashi_name": self._haa_dashi_name,
        },
        "agent": {"resource_id": self.resource_id},
    }

    # snapshot pre-existing services/processes so tests can diff against them
    self._base_services, _ = self.container.resource_registry.find_resources(
        restype="Service", name=self.process_definition_name
    )

    self._base_procs = self.pd_cli.list_processes()

    self.waiter = ProcessStateWaiter()
    self.waiter.start()

    self.container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
    self._haa_pid = self.container_client.spawn_process(
        name=self._haa_name,
        module="ion.agents.cei.high_availability_agent",
        cls="HighAvailabilityAgent",
        config=self._haa_config,
    )

    # Start a resource agent client to talk with the instrument agent.
    self._haa_pyon_client = SimpleResourceAgentClient(self.resource_id, process=FakeProcess())
    log.info("Got haa client %s.", str(self._haa_pyon_client))

    self.haa_client = HighAvailabilityAgentClient(self._haa_pyon_client)
def setUp(self):
    """Start a container, build DM service clients, and create test resources."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.pubsub_management = PubsubManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.process_dispatch_client = ProcessDispatcherServiceClient(node=self.container.node)
    self.resource_registry = self.container.resource_registry

    # parameter contexts and resources supplied by the concrete test class
    self.context_ids = self.build_param_contexts()
    self.setup_resources()
def setUp(self):
    """Start a DM container with the 'a'-variant ingestion/replay processes
    and build the DM service clients."""
    self._start_container()
    config = DotDict()
    # override the ingestion/replay worker implementations for this test run
    config.bootstrap.processes.ingestion.module = 'ion.processes.data.ingestion.ingestion_worker_a'
    config.bootstrap.processes.replay.module = 'ion.processes.data.replay.replay_process_a'
    self.container.start_rel_from_url('res/deploy/r2dm.yml', config)

    self.datastore_name = 'test_datasets'
    self.pubsub_management = PubsubManagementServiceClient()
    self.ingestion_management = IngestionManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
def setUp(self):
    """Start a container and build the data-product-related service clients."""
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
def begin(self):
    """Plugin hook: bootstrap pyon (if needed), open a messaging node, and
    create a PD client for process bookkeeping across tests."""
    # imports deferred so the plugin can load without pyon on the path
    from interface.services.cei.iprocess_dispatcher_service import ProcessDispatcherServiceClient
    from pyon.net.messaging import make_node
    from pyon.core import bootstrap
    from pyon.public import CFG

    self.base_pids = []
    self.rpc_timeout = 2
    self._procs_by_test = {}
    if not bootstrap.pyon_initialized:
        bootstrap.bootstrap_pyon()
    self.node, self.ioloop = make_node()
    self.node.setup_interceptors(CFG.interceptor)
    self.pd_cli = ProcessDispatcherServiceClient(node=self.node)
def setUp(self):
    """Start a DM container, open the cache datastore, and build service clients."""
    self._start_container()
    self.datastore_name = CACHE_DATASTORE_NAME
    self.container.start_rel_from_url('res/deploy/r2dm.yml')
    self.db = self.container.datastore_manager.get_datastore(self.datastore_name,DataStore.DS_PROFILE.SCIDATA)
    self.tms_cli = TransformManagementServiceClient()
    self.pubsub_cli = PubsubManagementServiceClient()
    self.pd_cli = ProcessDispatcherServiceClient()
    self.rr_cli = ResourceRegistryServiceClient()

    # CFG value must look like "<exchange_space>.<exchange_point>"
    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        self.XP = '.'.join([bootstrap.get_sys_name(), xp_base])
    except ValueError:
        raise StandardError('Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure' % xs_dot_xp)