Example #1
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli           = DataProductManagementServiceClient()
        self.rrclient           = ResourceRegistryServiceClient()
        self.damsclient         = DataAcquisitionManagementServiceClient()
        self.pubsubcli          = PubsubManagementServiceClient()
        self.ingestclient       = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc               = UserNotificationServiceClient()
        self.data_retriever     = DataRetrieverServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

        self.process_definitions  = {}
        ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space       = 'science_granule_ingestion'
        self.exchange_point       = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)
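
A matching tearDown is not shown in this example; below is a minimal sketch, assuming the test cancels the worker pids it recorded and stops the container the same way other tests in this set do (cancel_process is the ProcessDispatcherServiceClient call used in Example #29):

    def tearDown(self):
        # Sketch only: cancel every ingestion worker spawned in setUp,
        # then stop the capability container.
        for pid in self.pids:
            self.process_dispatcher.cancel_process(pid)
        self._stop_container()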
Example #2
    def create_worker(self, number_of_workers=1):
        """
        Creates notification workers

        @param number_of_workers int
        @retval pids list

        """

        pids = []

        for n in xrange(number_of_workers):

            process_definition = ProcessDefinition(
                name='notification_worker_%s' % n)

            process_definition.executable = {
                'module': 'ion.processes.data.transforms.notification_worker',
                'class': 'NotificationWorker'
            }
            process_definition_id = self.process_dispatcher.create_process_definition(
                process_definition=process_definition)

            # ------------------------------------------------------------------------------------
            # Process Spawning
            # ------------------------------------------------------------------------------------

            pid2 = self.process_dispatcher.create_process(
                process_definition_id)

            #@todo put in a configuration
            configuration = {}
            configuration['process'] = {
                'name': 'notification_worker_%s' % n,
                'type': 'simple',
                'queue_name': 'notification_worker_queue'
            }

            pid = self.process_dispatcher.schedule_process(
                process_definition_id,
                configuration=configuration,
                process_id=pid2)

            pids.append(pid)

        return pids
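
A usage sketch for the helper above (the worker count here is illustrative):

    # Spawn three notification workers and keep their pids for later cleanup.
    pids = self.create_worker(number_of_workers=3)
    assert len(pids) == 3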
Example #3
    def _do_launch_gate(self, platform_id, agent_config, timeout_spawn):
        """
        The method for when using the ProcessStateGate pattern, which is the
        one used by test_oms_launch2 to launch the root platform.
        """
        log.debug("_do_launch_gate: platform_id=%r, timeout_spawn=%s",
                  platform_id, timeout_spawn)

        pa_name = 'PlatformAgent_%s' % platform_id

        pdef = ProcessDefinition(name=pa_name)
        pdef.executable = {'module': PA_MOD, 'class': PA_CLS}
        pdef_id = self._pd_client.create_process_definition(
            process_definition=pdef)

        log.debug("using schedule_process directly %r", platform_id)

        pid = self._pd_client.schedule_process(process_definition_id=pdef_id,
                                               schedule=None,
                                               configuration=agent_config)

        if timeout_spawn:
            # ProcessStateGate used as indicated in its pydoc (9/21/12)
            gate = ProcessStateGate(self._pd_client.read_process, pid,
                                    ProcessStateEnum.RUNNING)
            err_msg = None
            try:
                if not gate.await(timeout_spawn):
                    err_msg = "The platform agent instance did not spawn in " \
                              "%s seconds; gate.await returned false." % \
                              timeout_spawn
                    log.error(err_msg)

            except Exception as e:
                log.error(
                    "Exception while waiting for platform agent instance "
                    "(platform_id=%r) "
                    "to spawn in %s seconds: %s", platform_id, timeout_spawn,
                    str(e))  #,exc_Info=True)

            if err_msg:
                raise PlatformException(err_msg)

        log.debug(
            "_do_launch_gate: platform_id=%r: agent spawned, pid=%r "
            "(ProcessStateGate pattern used)", platform_id, pid)

        return pid
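
A minimal invocation sketch; the platform id and agent configuration below are illustrative, not from the original test:

    # Launch the root platform agent and wait up to 30 seconds for it to spawn.
    pid = self._do_launch_gate(platform_id='Node1A',
                               agent_config=agent_config,
                               timeout_spawn=30)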
Example #4
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2cei.yml')

        self.rr_cli = ResourceRegistryServiceClient()
        self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.process_definition = ProcessDefinition(name='test_process')
        self.process_definition.executable = {
            'module': 'ion.services.cei.test.test_process_dispatcher',
            'class': 'TestProcess'
        }
        self.process_definition_id = self.pd_cli.create_process_definition(
            self.process_definition)

        self.waiter = ProcessStateWaiter()
Example #5
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2cei.yml')

        self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.process_definition = ProcessDefinition(name='test_process')
        self.process_definition.executable = {
            'module': 'ion.services.cei.test.test_process_dispatcher',
            'class': 'TestProcess'
        }
        self.process_definition_id = self.pd_cli.create_process_definition(
            self.process_definition)
        self.event_queue = queue.Queue()

        self.event_sub = None
Example #6
    def pydap_server(self, process, config):
        pydap_module = config.get_safe(
            'bootstrap.processes.pydap.module',
            'ion.processes.data.externalization.lightweight_pydap')
        pydap_class = config.get_safe('bootstrap.processes.pydap.class',
                                      'LightweightPyDAP')

        use_pydap = config.get_safe('bootstrap.launch_pydap', False)

        process_definition = ProcessDefinition(
            name='pydap_server',
            description='Lightweight WSGI Server for PyDAP')
        process_definition.executable['module'] = pydap_module
        process_definition.executable['class'] = pydap_class

        self._create_and_launch(process_definition, use_pydap)
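
The _create_and_launch helper is not shown in this example. A plausible sketch, assuming the same process-dispatcher client (self.pds_client) used in Example #10: it would register the definition and only schedule a process when the launch flag is set:

    def _create_and_launch(self, process_definition, launch=True):
        # Sketch only; the real helper may differ.
        definition_id = self.pds_client.create_process_definition(
            process_definition=process_definition)
        if launch:
            self.pds_client.schedule_process(process_definition_id=definition_id)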
Example #7
    def test_event_triggered_transform_B(self):
        '''
        Test that packets are processed by the event triggered transform
        '''

        #---------------------------------------------------------------------------------------------
        # Launch a ctd transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='EventTriggeredTransform_B',
            description='For testing EventTriggeredTransform_B')
        process_definition.executable['module'] = 'ion.processes.data.transforms.event_triggered_transform'
        process_definition.executable['class'] = 'EventTriggeredTransform_B'
        event_transform_proc_def_id = self.process_dispatcher.create_process_definition(
            process_definition=process_definition)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)

        stream_def_id = self.pubsub.create_stream_definition(
            'stream_def', parameter_dictionary_id=pdict_id)
        stream_id, _ = self.pubsub.create_stream(
            'test_stream',
            exchange_point='science_data',
            stream_definition_id=stream_def_id)

        # Build the config
        config = DotDict()
        config.process.queue_name = self.exchange_name
        config.process.exchange_point = self.exchange_point
        config.process.publish_streams.output = stream_id
        config.process.event_type = 'ResourceLifecycleEvent'
        config.process.stream_id = stream_id

        # Schedule the process
        self.process_dispatcher.schedule_process(
            process_definition_id=event_transform_proc_def_id,
            configuration=config)

        #---------------------------------------------------------------------------------------------
        # Publish an event to wake up the event triggered transform
        #---------------------------------------------------------------------------------------------

        event_publisher = EventPublisher("ResourceLifecycleEvent")
        event_publisher.publish_event(origin='fake_origin')
Example #8
    def on_start(self):
        super(CacheLauncher, self).on_start()
        tms_cli = TransformManagementServiceClient()
        pubsub_cli = PubsubManagementServiceClient()
        pd_cli = ProcessDispatcherServiceClient()
        dname = CACHE_DATASTORE_NAME
        number_of_workers = self.CFG.get_safe('process.number_of_workers', 2)

        proc_def = ProcessDefinition(
            name='last_update_worker_process',
            description='Worker process for caching the last update from a stream')
        proc_def.executable['module'] = 'ion.processes.data.last_update_cache'
        proc_def.executable['class'] = 'LastUpdateCache'
        proc_def_id = pd_cli.create_process_definition(
            process_definition=proc_def)

        xs_dot_xp = CFG.core_xps.science_data
        try:
            self.XS, xp_base = xs_dot_xp.split('.')
            self.XP = '.'.join([get_sys_name(), xp_base])
        except ValueError:
            raise StandardError(
                'Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure'
                % xs_dot_xp)

        subscription_id = pubsub_cli.create_subscription(
            query=ExchangeQuery(), exchange_name='last_update_cache')

        config = {
            'couch_storage': {
                'datastore_name': dname,
                'datastore_profile': 'SCIDATA'
            }
        }

        for i in xrange(number_of_workers):
            transform_id = tms_cli.create_transform(
                name='last_update_cache%d' % i,
                description='last_update that compiles an aggregate of metadata',
                in_subscription_id=subscription_id,
                process_definition_id=proc_def_id,
                configuration=config)
            tms_cli.activate_transform(transform_id=transform_id)
Example #9
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2cei.yml')
        #self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)
        self.pd_cli = ProcessDispatcherServiceClient(
            to_name="process_dispatcher")

        self.process_definition_id = uuid4().hex
        self.process_definition_name = 'test_haagent_%s' % self.process_definition_id
        self.process_definition = ProcessDefinition(
            name=self.process_definition_name,
            executable={
                'module': 'ion.agents.cei.test.test_haagent',
                'class': 'TestProcess'
            })
        self.pd_cli.create_process_definition(self.process_definition,
                                              self.process_definition_id)

        service_definition = SERVICE_DEFINITION_TMPL % self.process_definition_name
        sd = IonObject(RT.ServiceDefinition, {
            "name": self.process_definition_name,
            "definition": service_definition
        })
        self.service_def_id, _ = self.container.resource_registry.create(sd)

        self.resource_id = "haagent_1234"
        self._haa_name = "high_availability_agent"
        self._haa_dashi_name = "dashi_haa_" + uuid4().hex
        self._haa_dashi_uri = get_dashi_uri_from_cfg()
        self._haa_dashi_exchange = "hatests"
        self._haa_config = self._get_haagent_config()

        self._base_services, _ = self.container.resource_registry.find_resources(
            restype="Service", name=self.process_definition_name)

        self._base_procs = self.pd_cli.list_processes()

        self.waiter = ProcessStateWaiter()
        self.waiter.start()

        self.container_client = ContainerAgentClient(node=self.container.node,
                                                     name=self.container.name)
        self._spawn_haagent()
        self.addCleanup(self._stop_haagent)

        self._setup_haa_client()
Example #10
    def replay_defs(self, process, config):
        replay_module = config.get_safe(
            'bootstrap.processes.replay.module',
            'ion.processes.data.replay.replay_process')
        replay_class = config.get_safe('bootstrap.processes.replay.class',
                                       'ReplayProcess')
        #--------------------------------------------------------------------------------
        # Create replay process definition
        #--------------------------------------------------------------------------------

        process_definition = ProcessDefinition(
            name=DataRetrieverService.REPLAY_PROCESS,
            description='Process for the replay of datasets')
        process_definition.executable['module'] = replay_module
        process_definition.executable['class'] = replay_class
        self.pds_client.create_process_definition(
            process_definition=process_definition)
Example #11
    def test_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)
Example #12
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        log.debug("Start rel from url")
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.DPMS = DataProductManagementServiceClient()
        self.RR = ResourceRegistryServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.DAMS = DataAcquisitionManagementServiceClient()
        self.PSMS = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.PD = ProcessDispatcherServiceClient()
        self.DSMS = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------
        log.debug("get datastore")
        datastore_name = CACHE_DATASTORE_NAME
        self.db = self.container.datastore_manager.get_datastore(
            datastore_name)
        self.stream_def_id = self.PSMS.create_stream_definition(
            name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(
            name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.PD.create_process_definition(
            process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        self.addCleanup(self.cleaning_up)
Example #13
    def setUp(self):
        DMTestCase.setUp(self)

        process_definition = ProcessDefinition(
            name='qc_post_processor',
            executable={
                'module': 'ion.processes.data.transforms.qc_post_processing',
                'class': 'QCPostProcessing'
            })
        self.process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition)
        self.addCleanup(self.process_dispatcher.delete_process_definition,
                        self.process_definition_id)

        self.process_id = self.process_dispatcher.create_process(
            self.process_definition_id)
        self.scheduler_service = SchedulerServiceClient()
Example #14
    def create_process(name='', module='', class_name='', configuration=None):
        '''
        A helper method to create a process
        '''

        producer_definition = ProcessDefinition(name=name)
        producer_definition.executable = {
            'module': module,
            'class': class_name
        }

        process_dispatcher = ProcessDispatcherServiceClient()

        procdef_id = process_dispatcher.create_process_definition(process_definition=producer_definition)
        pid = process_dispatcher.schedule_process(process_definition_id=procdef_id, configuration=configuration)

        return pid
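
A usage sketch for this helper, reusing the CTD publisher module and class that appear in Example #27:

    # Launch a CTD publisher with no extra configuration.
    pid = create_process(name='ctd_publisher',
                         module='ion.processes.data.ctd_stream_publisher',
                         class_name='SimpleCtdPublisher')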
Example #15
    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name+'_logger')
        producer_definition.executable = {
            'module': 'ion.processes.data.stream_granule_logger',
            'class': 'StreamGranuleLogger'
        }

        logger_procdef_id = self.process_dispatch_client.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process': {
                'stream_id': stream_id,
            }
        }
        pid = self.process_dispatch_client.schedule_process(process_definition_id=logger_procdef_id, configuration=configuration)

        return pid
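
A usage sketch, assuming a stream id created earlier in the test (ctd_stream_id is illustrative):

    # Attach a granule logger to an existing stream and remember its pid.
    logger_pid = self.create_logger('ctd_parsed', stream_id=ctd_stream_id)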
Example #16
    def setUp(self):
        self.dashi = None
        self._start_container()
        from pyon.public import CFG

        self.container_client = ContainerAgentClient(node=self.container.node,
                                                     name=self.container.name)
        self.container = self.container_client._get_container_instance()

        app = dict(name="process_dispatcher",
                   processapp=("process_dispatcher",
                               "ion.services.cei.process_dispatcher_service",
                               "ProcessDispatcherService"))
        self.container.start_app(app, config=pd_config)

        self.rr_cli = self.container.resource_registry

        self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.process_definition = ProcessDefinition(name='test_process')
        self.process_definition.executable = {
            'module': 'ion.services.cei.test.test_process_dispatcher',
            'class': 'TestProcess'
        }
        self.process_definition_id = self.pd_cli.create_process_definition(
            self.process_definition)

        self._eea_pids = []
        self._eea_pid_to_resource_id = {}
        self._eea_pid_to_persistence_dir = {}
        self._tmpdirs = []

        self.dashi = get_dashi(
            uuid.uuid4().hex,
            pd_config['processdispatcher']['dashi_uri'],
            pd_config['processdispatcher']['dashi_exchange'],
            sysname=CFG.get_safe("dashi.sysname"))

        #send a fake node_state message to PD's dashi binding.
        self.node1_id = uuid.uuid4().hex
        self._send_node_state("engine1", self.node1_id)
        self._initial_eea_pid = self._start_eeagent(self.node1_id)

        self.waiter = ProcessStateWaiter()
Example #17
    def create_event_process_definition(self,
                                        version='',
                                        module='',
                                        class_name='',
                                        uri='',
                                        arguments=None,
                                        event_types=None,
                                        sub_types=None,
                                        origin_types=None):
        """
        Create a resource which defines the processing of events.

        @param version str
        @param module str
        @param class_name str
        @param uri str
        @param arguments list

        @return procdef_id str
        """

        # Create the event process detail object
        event_process_definition_detail = EventProcessDefinitionDetail()
        event_process_definition_detail.event_types = event_types
        event_process_definition_detail.sub_types = sub_types
        event_process_definition_detail.origin_types = origin_types

        # Create the process definition
        process_definition = ProcessDefinition(
            name=create_unique_identifier('event_process'))
        process_definition.executable = {
            'module': module,
            'class': class_name,
            'url': uri
        }
        process_definition.version = version
        process_definition.arguments = arguments
        process_definition.definition = event_process_definition_detail

        procdef_id = self.clients.process_dispatcher.create_process_definition(
            process_definition=process_definition)

        return procdef_id
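
A usage sketch for this service method; the module, class, and origin names below are hypothetical, while the event type is the one used in Example #7:

    # Define an event process that reacts to resource lifecycle events.
    procdef_id = self.create_event_process_definition(
        version='1.0',
        module='ion.processes.event.event_logger',  # hypothetical module
        class_name='EventLogger',                   # hypothetical class
        uri='',
        event_types=['ResourceLifecycleEvent'],
        origin_types=['InstrumentDevice'])          # illustrative origin type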
Example #18
    def test_update_event_process_definition(self):
        """
        Test updating an event process definition
        """
        process_definition_id = 'an id'
        process_def = ProcessDefinition()

        self.mock_rr_client.read = mocksignature(self.mock_rr_client.read)
        self.mock_rr_client.read.return_value = process_def

        self.mock_rr_client.update = mocksignature(self.mock_rr_client.update)

        self.event_management.update_event_process_definition(
            event_process_definition_id=process_definition_id,
            version='version',
            event_types=['t1', 't2'],
            sub_types=['t3'],
            origin_types=['or1', 'or2'])

        self.mock_rr_client.update.assert_called_once_with(process_def)
Example #19
    def _get_highcharts_procdef(self):
        '''
        A somewhat idempotent retrieval of the process definition.
        Returns the HighCharts process definition id.
        '''
        procdefs, _ = self.clients.resource_registry.find_resources(
            name='HighChartsDefinition',
            restype=RT.ProcessDefinition,
            id_only=True)
        if procdefs:
            return procdefs[0]

        procdef = ProcessDefinition(name='HighChartsDefinition')
        procdef.executable['module'] = 'ion.processes.data.transforms.viz.highcharts'
        procdef.executable['class'] = 'VizTransformHighCharts'

        procdef_id = self.clients.process_dispatcher.create_process_definition(
            procdef)
        return procdef_id
Example #20
    def _do_test_find_objects_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results, _  = self.resource_registry_service.find_objects_mult(subjects=[dp_id],id_only=True)
        self.assertTrue(results == [transform_id])

        results, _  = self.resource_registry_service.find_objects_mult(subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
Example #21
    def registration_worker(self, process, config):
        res, meta = self.resource_registry.find_resources(
            name='registration_worker', restype=RT.ProcessDefinition)
        if len(res):
            return

        registration_module = config.get_safe(
            'bootstrap.processes.registration.module',
            'ion.processes.data.registration.registration_process')
        registration_class = config.get_safe(
            'bootstrap.processes.registration.class', 'RegistrationProcess')
        use_pydap = True

        process_definition = ProcessDefinition(
            name='registration_worker',
            description='For registering datasets with ERDDAP')
        process_definition.executable['module'] = registration_module
        process_definition.executable['class'] = registration_class

        self._create_and_launch(process_definition, use_pydap)
Example #22
    def setUp(self):
        # set up the container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
        self.tms_cli = TransformManagementServiceClient(node=self.container.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
        self.procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.input_stream_id = self.pubsub_cli.create_stream(name='input_stream', original=True)

        self.input_subscription_id = self.pubsub_cli.create_subscription(
            query=StreamQuery(stream_ids=[self.input_stream_id]),
            exchange_name='transform_input',
            name='input_subscription')

        self.output_stream_id = self.pubsub_cli.create_stream(name='output_stream', original=True)

        self.process_definition = ProcessDefinition(name='basic_transform_definition')
        self.process_definition.executable = {
            'module': 'ion.processes.data.transforms.transform_example',
            'class': 'TransformExample'
        }
        self.process_definition_id = self.procd_cli.create_process_definition(process_definition=self.process_definition)
Example #23
    def on_initial_bootstrap(self, process, config, **kwargs):

        if os.environ.get('PYCC_MODE'):
            # This environment is an ion integration test
            log.info('PYCC_MODE: skipping qc_post_processor launch')
            return
        if self.process_exists(process, 'qc_post_processor'):
            # Short-circuit the bootstrap so that no more than one is ever started
            return

        self.scheduler_service = SchedulerServiceProcessClient(process=process)
        self.process_dispatcher = ProcessDispatcherServiceProcessClient(
            process=process)
        self.run_interval = CFG.get_safe('service.qc_processing.run_interval',
                                         24)

        interval_key = uuid4().hex  # Unique identifier for this process

        config = DotDict()
        config.process.interval_key = interval_key

        process_definition = ProcessDefinition(
            name='qc_post_processor',
            executable={
                'module': 'ion.processes.data.transforms.qc_post_processing',
                'class': 'QCPostProcessing'
            })
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition)

        process_id = self.process_dispatcher.create_process(
            process_definition_id)
        self.process_dispatcher.schedule_process(process_definition_id,
                                                 process_id=process_id,
                                                 configuration=config)

        timer_id = self.scheduler_service.create_interval_timer(
            start_time=str(time.time()),
            end_time='-1',  #Run FOREVER
            interval=3600 * self.run_interval,
            event_origin=interval_key)
Example #24
    def setUp(self):
        mock_clients = self._create_service_mock('data_retriever')
        self.data_retriever_service = DataRetrieverService()
        self.data_retriever_service.clients = mock_clients
        self.mock_rr_create = self.data_retriever_service.clients.resource_registry.create
        self.mock_rr_create_assoc = self.data_retriever_service.clients.resource_registry.create_association
        self.mock_rr_read = self.data_retriever_service.clients.resource_registry.read
        self.mock_rr_update = self.data_retriever_service.clients.resource_registry.update
        self.mock_rr_delete = self.data_retriever_service.clients.resource_registry.delete
        self.mock_rr_delete_assoc = self.data_retriever_service.clients.resource_registry.delete_association
        self.mock_rr_find_assoc = self.data_retriever_service.clients.resource_registry.find_associations
        self.mock_ps_create_stream = self.data_retriever_service.clients.pubsub_management.create_stream
        self.mock_ps_create_stream_definition = self.data_retriever_service.clients.pubsub_management.create_stream_definition
        self.data_retriever_service.container = DotDict({
            'id': '123',
            'spawn_process': Mock(),
            'proc_manager': DotDict({
                'terminate_process': Mock(),
                'procs': []
            }),
            'datastore_manager': DotDict({'get_datastore': Mock()})
        })
        self.datastore = DotDict({'query_view': Mock()})
        self.data_retriever_service.container.datastore_manager.get_datastore.return_value = self.datastore
        self.mock_cc_spawn = self.data_retriever_service.container.spawn_process
        self.mock_cc_terminate = self.data_retriever_service.container.proc_manager.terminate_process
        self.mock_pd_schedule = self.data_retriever_service.clients.process_dispatcher.schedule_process
        self.mock_pd_cancel = self.data_retriever_service.clients.process_dispatcher.cancel_process
        self.mock_ds_read = self.data_retriever_service.clients.dataset_management.read_dataset
        self.data_retriever_service.process_definition = ProcessDefinition()
        self.data_retriever_service.process_definition.executable['module'] = 'ion.processes.data.replay_process'
        self.data_retriever_service.process_definition.executable['class'] = 'ReplayProcess'

        self.data_retriever_service.process_definition_id = 'mock_procdef_id'
Example #25
    def test_definitions(self):

        executable = {'module': 'my_module', 'class': 'class'}
        definition = ProcessDefinition(name="someprocess",
                                       executable=executable)
        pd_id = self.pd_service.create_process_definition(definition)
        assert self.mock_core.create_definition.called
        self.assertTrue(pd_id)
        self.mock_rr.create.assert_called_once_with(definition,
                                                    object_id=pd_id)

        self.mock_core.describe_definition.return_value = dict(
            name="someprocess", executable=executable)

        definition2 = self.pd_service.read_process_definition("someprocess")
        assert self.mock_core.describe_definition.called
        self.assertEqual(definition2.name, "someprocess")
        self.assertEqual(definition2.executable, executable)

        self.pd_service.delete_process_definition("someprocess")
        assert self.mock_core.remove_definition.called
        self.mock_rr.delete.assert_called_once_with(pd_id)
Example #26
    def test_presf_L0_splitter(self):
        '''
        Test that packets are processed by the presf_L0_splitter transform
        '''

        #---------------------------------------------------------------------------------------------
        # Launch a ctd transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='Presf L0 Splitter',
            description='For testing Presf L0 Splitter')
        process_definition.executable['module'] = 'ion.processes.data.transforms.ctd.presf_L0_splitter'
        process_definition.executable['class'] = 'PresfL0Splitter'
        ctd_transform_proc_def_id = self.process_dispatcher.create_process_definition(
            process_definition=process_definition)

        # Build the config
        config = DotDict()
        config.process.queue_name = self.exchange_name
        config.process.exchange_point = self.exchange_point

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)

        stream_def_id = self.pubsub.create_stream_definition(
            'pres_stream_def', parameter_dictionary_id=pdict_id)
        pres_stream_id, _ = self.pubsub.create_stream(
            'test_pressure',
            stream_definition_id=stream_def_id,
            exchange_point='science_data')

        config.process.publish_streams.absolute_pressure = pres_stream_id

        # Schedule the process
        self.process_dispatcher.schedule_process(
            process_definition_id=ctd_transform_proc_def_id,
            configuration=config)
Example #27
    def start_input_stream_process(self, ctd_stream_id, module='ion.processes.data.ctd_stream_publisher', class_name='SimpleCtdPublisher'):
        ###
        ### Start the process for producing the CTD data
        ###
        # process definition for the ctd simulator...
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module': module,
            'class': class_name
        }

        ctd_sim_procdef_id = self.process_dispatcher.create_process_definition(process_definition=producer_definition)

        # Start the ctd simulator to produce some data
        configuration = {
            'process': {
                'stream_id': ctd_stream_id,
            }
        }

        ctd_sim_pid = self.process_dispatcher.schedule_process(process_definition_id=ctd_sim_procdef_id, configuration=configuration)

        return ctd_sim_pid
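
A usage sketch with the default publisher module and class, assuming a stream created earlier in the test:

    # Start the CTD simulator against an existing stream.
    ctd_sim_pid = self.start_input_stream_process(ctd_stream_id)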
Example #28
    def test_schedule(self):

        self.mock_backend['read_definition'].return_value = ProcessDefinition()

        upid = 'myupid'
        name = 'myname'
        definition_id = 'something'
        queueing_mode = 'RESTART_ONLY'
        restart_mode = 'ABNORMAL'

        self.pd_dashi_handler.schedule_process(upid,
                                               definition_id,
                                               queueing_mode=queueing_mode,
                                               restart_mode=restart_mode,
                                               name=name)

        self.assertEqual(self.mock_backend.schedule.call_count, 1)
        args, kwargs = self.mock_backend.schedule.call_args
        passed_schedule = args[2]
        assert passed_schedule.queueing_mode == ProcessQueueingMode.RESTART_ONLY
        assert passed_schedule.restart_mode == ProcessRestartMode.ABNORMAL
        passed_name = args[4]
        assert passed_name == name
Example #29
    def test_cei_launch_mode(self):
        
        pdc = ProcessDispatcherServiceClient(node=self.container.node)
        p_def = ProcessDefinition(name='Agent007')
        p_def.executable = {
            'module' : 'ion.agents.instrument.instrument_agent',
            'class' : 'InstrumentAgent'
        }
        p_def_id = pdc.create_process_definition(p_def)
        
        pid = pdc.create_process(p_def_id)
        
        def event_callback(event, *args, **kwargs):
            print '######### proc %s in state %s' % (event.origin, ProcessStateEnum._str_map[event.state])
 
        sub = EventSubscriber(event_type='ProcessLifecycleEvent',
                              callback=event_callback,
                              origin=pid,
                              origin_type='DispatchedProcess')
         
        sub.start()

        agent_config = deepcopy(self._agent_config)
        agent_config['bootmode'] = 'restart'
        pdc.schedule_process(p_def_id, process_id=pid,
                             configuration=agent_config)
        
        gevent.sleep(5)
        
        pdc.cancel_process(pid)
        
        gevent.sleep(15)

        sub.stop()
        
        
Example #30
    def test_schedule_by_name(self):
        pdef = ProcessDefinition()
        pdef._id = "someprocessid"
        self.mock_backend['read_definition_by_name'].return_value = pdef

        upid = 'myupid'
        definition_name = 'something'
        queueing_mode = 'RESTART_ONLY'
        restart_mode = 'ABNORMAL'

        self.pd_dashi_handler.schedule_process(upid,
                                               definition_name=definition_name,
                                               queueing_mode=queueing_mode,
                                               restart_mode=restart_mode)

        self.mock_backend.read_definition_by_name.assert_called_once_with(
            definition_name)
        self.assertFalse(self.mock_backend.read_definition.call_count)

        self.assertEqual(self.mock_backend.schedule.call_count, 1)
        args, kwargs = self.mock_backend.schedule.call_args
        passed_schedule = args[2]
        assert passed_schedule.queueing_mode == ProcessQueueingMode.RESTART_ONLY
        assert passed_schedule.restart_mode == ProcessRestartMode.ABNORMAL