    def test_create_event_process(self):
        """
        Test creating an event process
        """
        process_definition = ProcessDefinition(name='test')
        process_definition.definition = ''

        rrc = ResourceRegistryServiceClient(node=self.container.node)
        process_definition_id, _ = rrc.create(process_definition)

        self.mock_rr_client.find_objects = Mock()
        self.mock_rr_client.find_objects.return_value = (['stream_id_1'], 'obj_assoc_1')

        # self.mock_pd_client.schedule_process = Mock()
        # self.mock_pd_client.schedule_process.return_value = 'process_id'

        self.mock_rr_client.create_association = mocksignature(self.mock_rr_client.create_association)

        pid = self.event_management.create_event_process(process_definition_id=process_definition_id,
            event_types=['type_1', 'type_2'],
            sub_types=['subtype_1', 'subtype_2'],
            origins=['or_1', 'or_2'],
            origin_types=['t1', 't2'],
            out_data_products={'conductivity': 'id1'}
        )
    def notification_worker(self, process, config):
        # user notifications
        notification_module    = config.get_safe('bootstrap.processes.user_notification.module', 'ion.processes.data.transforms.notification_worker')
        notification_class     = config.get_safe('bootstrap.processes.user_notification.class', 'NotificationWorker')
        notification_workers   = config.get_safe('bootstrap.processes.user_notification.workers', 1)

        #--------------------------------------------------------------------------------
        # Create notification workers
        #--------------------------------------------------------------------------------

        # set up the process definition
        process_definition_uns = ProcessDefinition(
            name='notification_worker_process',
            description='Worker transform process for user notifications')
        process_definition_uns.executable['module'] = notification_module
        process_definition_uns.executable['class'] = notification_class
        uns_procdef_id = self.pds_client.create_process_definition(process_definition=process_definition_uns)

        # Use a separate DotDict so the bootstrap `config` argument is not shadowed
        worker_config = DotDict()
        worker_config.process.type = 'simple'

        for i in xrange(notification_workers):
            worker_config.process.name = 'notification_worker_%s' % i
            worker_config.process.queue_name = 'notification_worker_queue'
            self.pds_client.schedule_process(process_definition_id=uns_procdef_id, configuration=worker_config)
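    # A sketch of the deployment configuration that the get_safe() calls above
    # read. The key paths mirror the defaults in notification_worker(); the
    # override values shown here are illustrative, not taken from the source:
    #
    #     CFG = {
    #         'bootstrap': {
    #             'processes': {
    #                 'user_notification': {
    #                     'module': 'ion.processes.data.transforms.notification_worker',
    #                     'class': 'NotificationWorker',
    #                     'workers': 2,
    #                 }
    #             }
    #         }
    #     }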
    def ingestion_worker(self, process, config):
        # ingestion
        ingestion_module    = config.get_safe('bootstrap.processes.ingestion.module', 'ion.processes.data.ingestion.science_granule_ingestion_worker')
        ingestion_class     = config.get_safe('bootstrap.processes.ingestion.class', 'ScienceGranuleIngestionWorker')
        ingestion_datastore = config.get_safe('bootstrap.processes.ingestion.datastore_name', 'datasets')
        ingestion_queue     = config.get_safe('bootstrap.processes.ingestion.queue', 'science_granule_ingestion')
        ingestion_workers   = config.get_safe('bootstrap.processes.ingestion.workers', 1)
        #--------------------------------------------------------------------------------
        # Create ingestion workers
        #--------------------------------------------------------------------------------

        process_definition = ProcessDefinition(
            name='ingestion_worker_process',
            description='Worker transform process for ingestion of datasets')
        process_definition.executable['module'] = ingestion_module
        process_definition.executable['class'] = ingestion_class
        ingestion_procdef_id = self.pds_client.create_process_definition(process_definition=process_definition)

        #--------------------------------------------------------------------------------
        # Simulate HA ingestion by launching the configured number of workers
        #--------------------------------------------------------------------------------
        worker_config = DotDict()
        worker_config.process.datastore_name = ingestion_datastore
        worker_config.process.queue_name     = ingestion_queue

        for i in xrange(ingestion_workers):
            self.pds_client.schedule_process(process_definition_id=ingestion_procdef_id, configuration=worker_config)
    def launch_platform(self, agt_id, agent_config, timeout_spawn=30):
        """
        Launches a platform agent.

        @param agt_id           Some ID mainly used for logging
        @param agent_config     Agent configuration
        @param timeout_spawn    Timeout in secs for the SPAWN event (by
                                default 30). If None or zero, no wait is performed.

        @return process ID
        """
        log.debug("launch platform: agt_id=%r, timeout_spawn=%s", agt_id, timeout_spawn)

        name = 'PlatformAgent_%s' % agt_id
        pdef = ProcessDefinition(name=name)
        pdef.executable = {
            'module': 'ion.agents.platform.platform_agent',
            'class':  'PlatformAgent'
        }

        pdef_id = self._pd_client.create_process_definition(process_definition=pdef)

        pid = self._agent_launcher.launch(agent_config, pdef_id)

        if timeout_spawn:
            self._agent_launcher.await_launch(timeout_spawn)

        return pid
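    # A minimal usage sketch for launch_platform(); `launcher` stands for an
    # instance of the enclosing class, and the agent_config below is illustrative:
    #
    #     agent_config = {'agent': {'resource_id': 'platform_01'}}
    #     pid = launcher.launch_platform('platform_01', agent_config, timeout_spawn=30)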
    def create_data_process_definition(self, data_process_definition=None):

        data_process_definition_id = self.RR2.create(data_process_definition, RT.DataProcessDefinition)

        # -------------------------------
        # Process Definition
        # -------------------------------
        # Create the underlying process definition
        process_definition = ProcessDefinition()
        process_definition.name = data_process_definition.name
        process_definition.description = data_process_definition.description

        process_definition.executable = {
            "module": data_process_definition.module,
            "class": data_process_definition.class_name,
        }
        process_definition_id = self.clients.process_dispatcher.create_process_definition(
            process_definition=process_definition
        )

        self.RR2.assign_process_definition_to_data_process_definition_with_has_process_definition(
            process_definition_id, data_process_definition_id
        )

        return data_process_definition_id
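    # Usage sketch, assuming a DataProcessDefinition resource whose module and
    # class_name name a real transform; the field values below are illustrative,
    # reusing the presf_L1 transform seen elsewhere in this file:
    #
    #     dpd = DataProcessDefinition(name='presf_L1',
    #                                 description='L1 pressure transform',
    #                                 module='ion.processes.data.transforms.ctd.presf_L1',
    #                                 class_name='PresfL1Transform')
    #     dpd_id = self.create_data_process_definition(dpd)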
    def create_event_process_definition(self, version='', module='', class_name='', uri='', arguments=None, event_types=None, sub_types=None, origin_types=None):
        """
        Create a resource which defines the processing of events.

        @param version str
        @param module str
        @param class_name str
        @param uri str
        @param arguments list
        @param event_types list
        @param sub_types list
        @param origin_types list

        @return procdef_id str
        """

        # Create the event process detail object
        event_process_definition_detail = EventProcessDefinitionDetail()
        event_process_definition_detail.event_types = event_types
        event_process_definition_detail.sub_types = sub_types
        event_process_definition_detail.origin_types = origin_types

        # Create the process definition
        process_definition = ProcessDefinition(name=create_unique_identifier('event_process'))
        process_definition.executable = {
            'module': module,
            'class': class_name,
            'url': uri
        }
        process_definition.version = version
        process_definition.arguments = arguments
        process_definition.definition = event_process_definition_detail

        procdef_id = self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)

        return procdef_id
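    # Usage sketch; the module/class reuse the event transform that appears
    # elsewhere in this file, and the event/sub/origin types are illustrative:
    #
    #     procdef_id = self.create_event_process_definition(
    #         version='1.0',
    #         module='ion.processes.data.transforms.event_in_stream_out_transform',
    #         class_name='EventToStreamTransform',
    #         event_types=['ExampleDetectableEvent'],
    #         sub_types=['subtype_1'],
    #         origin_types=['t1'])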
    def launch_instrument(self, agt_id, agent_config, timeout_spawn=None):
        """
        Launches an instrument agent.

        @param agt_id           Some ID mainly used for logging
        @param agent_config     Agent configuration
        @param timeout_spawn    Timeout in secs for the RUNNING event (by
                                default, the value given in constructor).
                                If None or zero, no wait is performed.

        @return process ID
        """
        timeout_spawn = timeout_spawn or self._timeout_spawn
        log.debug("launch_instrument: agt_id=%r, timeout_spawn=%s", agt_id, timeout_spawn)

        name = 'InstrumentAgent_%s' % agt_id
        pdef = ProcessDefinition(name=name)
        pdef.executable = {
            'module': 'ion.agents.instrument.instrument_agent',
            'class':  'InstrumentAgent'
        }

        pdef_id = self._pd_client.create_process_definition(process_definition=pdef)

        pid = self._agent_launcher.launch(agent_config, pdef_id)

        if timeout_spawn:
            log.debug("launch_instrument: agt_id=%r: waiting for RUNNING", agt_id)
            self._agent_launcher.await_launch(timeout_spawn)
            log.debug("launch_instrument: agt_id=%r: RUNNING", agt_id)

        return pid
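    # Usage sketch; unlike launch_platform() above, omitting timeout_spawn here
    # falls back to the value handed to the constructor (self._timeout_spawn):
    #
    #     pid = launcher.launch_instrument('instrument_01', agent_config)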
    def test_presf_L1(self):
        '''
        Test that packets are processed by the presf_L1 transform
        '''

        #---------------------------------------------------------------------------------------------
        # Launch a ctd transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='PresfL1Transform',
            description='For testing PresfL1Transform')
        process_definition.executable['module'] = 'ion.processes.data.transforms.ctd.presf_L1'
        process_definition.executable['class'] = 'PresfL1Transform'
        ctd_transform_proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

        # Build the config
        config = DotDict()
        config.process.queue_name = self.exchange_name
        config.process.exchange_point = self.exchange_point

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)

        stream_def_id = self.pubsub.create_stream_definition('pres_stream_def', parameter_dictionary_id=pdict_id)
        pres_stream_id, _ = self.pubsub.create_stream('test_pressure',
            stream_definition_id=stream_def_id,
            exchange_point='science_data')

        config.process.publish_streams.seafloor_pressure = pres_stream_id

        # Schedule the process
        self.process_dispatcher.schedule_process(process_definition_id=ctd_transform_proc_def_id, configuration=config)
    def dispatch_process(self, upid, spec, subscribers, constraints=None,
                         immediate=False):

        name = spec.get('name')
        self.event_pub.publish_event(event_type="ProcessLifecycleEvent",
            origin=name, origin_type="DispatchedHAProcess",
            state=ProcessStateEnum.SPAWN)
        process_def = ProcessDefinition(name=name)
        process_def.executable = {'module': spec.get('module'),
                'class': spec.get('class')}

        process_def_id = self.real_client.create_process_definition(process_def)

        pid = self.real_client.create_process(process_def_id)

        process_schedule = ProcessSchedule()

        sched_pid = self.real_client.schedule_process(process_def_id,
                process_schedule, configuration={}, process_id=pid)

        proc = self.real_client.read_process(sched_pid)
        dict_proc = {'upid': proc.process_id,
                'state': self.state_map.get(proc.process_state, self.unknown_state),
                }
        return dict_proc
    def start_worker(self):

        proc_def = ProcessDefinition()
        proc_def.executable['module'] = 'ion.processes.data.last_update_cache'
        proc_def.executable['class'] = 'LastUpdateCache'
        proc_def_id = self.pd_cli.create_process_definition(process_definition=proc_def)

        subscription_id = self.pubsub_cli.create_subscription(query=ExchangeQuery(), exchange_name='ingestion_aggregate')

        config = {
            'couch_storage' : {
                'datastore_name': self.datastore_name,
                'datastore_profile' : 'SCIDATA'
            }
        }

        transform_id = self.tms_cli.create_transform(
            name='last_update_cache',
            description='LastUpdate that compiles an aggregate of metadata',
            in_subscription_id=subscription_id,
            process_definition_id=proc_def_id,
            configuration=config
        )

        self.tms_cli.activate_transform(transform_id=transform_id)
        transform = self.rr_cli.read(transform_id)
        pid = transform.process_id
        handle = self.container.proc_manager.procs[pid]
        return handle
    def test_event_in_stream_out_transform(self):
        """
        Test the event-in/stream-out transform
        """

        stream_id, _ = self.pubsub.create_stream('test_stream', exchange_point='science_data')
        self.exchange_cleanup.append('science_data')

        #---------------------------------------------------------------------------------------------
        # Launch a ctd transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='EventToStreamTransform',
            description='For testing an event-in/stream-out transform')
        process_definition.executable['module'] = 'ion.processes.data.transforms.event_in_stream_out_transform'
        process_definition.executable['class'] = 'EventToStreamTransform'
        proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

        # Build the config
        config = DotDict()
        config.process.queue_name = 'test_queue'
        config.process.exchange_point = 'science_data'
        config.process.publish_streams.output = stream_id
        config.process.event_type = 'ExampleDetectableEvent'
        config.process.variables = ['voltage', 'temperature']

        # Schedule the process
        pid = self.process_dispatcher.schedule_process(process_definition_id=proc_def_id, configuration=config)
        self.addCleanup(self.process_dispatcher.cancel_process, pid)

        #---------------------------------------------------------------------------------------------
        # Create a subscriber for testing
        #---------------------------------------------------------------------------------------------

        ar_cond = gevent.event.AsyncResult()
        def subscriber_callback(m, r, s):
            ar_cond.set(m)
        sub = StandaloneStreamSubscriber('sub', subscriber_callback)
        self.addCleanup(sub.stop)
        sub_id = self.pubsub.create_subscription('subscription_cond',
            stream_ids=[stream_id],
            exchange_name='sub')
        self.pubsub.activate_subscription(sub_id)
        self.queue_cleanup.append(sub.xn.queue)
        sub.start()

        gevent.sleep(4)

        #---------------------------------------------------------------------------------------------
        # Publish an event. The transform has been configured to receive this event
        #---------------------------------------------------------------------------------------------

        event_publisher = EventPublisher("ExampleDetectableEvent")
        event_publisher.publish_event(origin='fake_origin', voltage='5', temperature='273')

        # Assert that the transform processed the event and published data on the output stream
        result_cond = ar_cond.get(timeout=10)
        self.assertTrue(result_cond)
    def __init__(self, *args, **kwargs):
        super(TransformExampleLauncher, self).__init__(*args, **kwargs)

        #-------------------------------
        # Process Definitions
        #-------------------------------

        transform_example_definition = ProcessDefinition(name='transform_example_definition')
        transform_example_definition.executable['module'] = 'ion.services.dm.transformation.transform_example'
        transform_example_definition.executable['class'] = 'TransformExample'
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        datastore_name = CACHE_DATASTORE_NAME
        self.db = self.container.datastore_manager.get_datastore(datastore_name)
        self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space       = 'science_granule_ingestion'
        self.exchange_point       = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)
    def replay_defs(self, process, config):
        replay_module       = config.get_safe('bootstrap.processes.replay.module', 'ion.processes.data.replay.replay_process')
        replay_class        = config.get_safe('bootstrap.processes.replay.class', 'ReplayProcess')
        #--------------------------------------------------------------------------------
        # Create replay process definition
        #--------------------------------------------------------------------------------

        process_definition = ProcessDefinition(name=DataRetrieverService.REPLAY_PROCESS, description='Process for the replay of datasets')
        process_definition.executable['module'] = replay_module
        process_definition.executable['class'] = replay_class
        self.pds_client.create_process_definition(process_definition=process_definition)
    def eoi_services(self, process, config):
        eoi_module = config.get_safe('bootstrap.processes.registration.module', 'ion.processes.data.registration.eoi_registration_process')
        eoi_class  = config.get_safe('bootstrap.processes.registration.class', 'EOIRegistrationProcess')

        process_definition = ProcessDefinition(
                name='eoi_server',
                description='Process for eoi data sources')
        process_definition.executable['module'] = eoi_module
        process_definition.executable['class'] = eoi_class

        self._create_and_launch(process_definition)
    def test_execute_transform(self):
        # set up
        process_definition = ProcessDefinition(name="procdef_execute")
        process_definition.executable["module"] = "ion.processes.data.transforms.transform_example"
        process_definition.executable["class"] = "ReverseTransform"
        data = [1, 2, 3]

        process_definition_id, _ = self.rr_cli.create(process_definition)

        retval = self.tms_cli.execute_transform(process_definition_id, data)

        self.assertEquals(retval, [3, 2, 1])
    def create_definition(self, definition_id, definition_type, executable,
                          name=None, description=None):

        if name is None:
            raise BadRequest("create_definition must have a name supplied")

        definition = ProcessDefinition(name=name, description=description)
        definition.executable = {'module': executable.get('module'),
                'class': executable.get('class')}
        definition.definition_type = definition_type
        created_definition = self.real_client.create_process_definition(
                definition, definition_id)
        return created_definition
    def create_ingestion_configuration(self, exchange_point_id='', couch_storage=None, hdf_storage=None, number_of_workers=0):
        """
        @brief Set up ingestion workers to ingest all the data from a single exchange point.
        @param exchange_point_id is the resource id for the exchange point to ingest from
        @param couch_storage is the specification of the couch database to use
        @param hdf_storage is the specification of the filesystem to use for hdf data files
        @param number_of_workers is the number of ingestion workers to create
        """

        if self.process_definition_id is None:
            process_definition = ProcessDefinition(name='ingestion_worker_process', description='Worker transform process for ingestion of datasets')
            process_definition.executable['module'] = 'ion.processes.data.ingestion.ingestion_worker'
            process_definition.executable['class'] = 'IngestionWorker'
            self.process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)

        # Give each ingestion configuration its own queue name to receive data on
        exchange_name = 'ingestion_queue'

        ##------------------------------------------------------------------------------------
        ## declare our intent to subscribe to all messages on the exchange point
        query = ExchangeQuery()

        subscription_id = self.clients.pubsub_management.create_subscription(query=query,
            exchange_name=exchange_name, name='Ingestion subscription', description='Subscription for ingestion workers')

        ##------------------------------------------------------------------------------------------

        # create an ingestion_configuration instance and update the registry
        # @todo: right now sending in the exchange_point_id as the name...
        ingestion_configuration = IngestionConfiguration(name=self.XP)
        ingestion_configuration.description = '%s exchange point ingestion configuration' % self.XP
        ingestion_configuration.number_of_workers = number_of_workers

        if hdf_storage is not None:
            ingestion_configuration.hdf_storage.update(hdf_storage)

        if couch_storage is not None:
            ingestion_configuration.couch_storage.update(couch_storage)

        ingestion_configuration_id, _ = self.clients.resource_registry.create(ingestion_configuration)

        self._launch_transforms(
            ingestion_configuration.number_of_workers,
            subscription_id,
            ingestion_configuration_id,
            ingestion_configuration,
            self.process_definition_id
        )
        return ingestion_configuration_id
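    # Usage sketch; the storage specs are illustrative dicts shaped for the
    # .update() calls above, reusing names that appear elsewhere in this file:
    #
    #     ingestion_configuration_id = self.create_ingestion_configuration(
    #         exchange_point_id='science_data',
    #         couch_storage={'datastore_name': 'datasets', 'datastore_profile': 'SCIDATA'},
    #         number_of_workers=2)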
    def on_start(self):
        super(IngestionManagementService, self).on_start()
        self.event_publisher = EventPublisher(event_type="DatasetIngestionConfigurationEvent")

        #########################################################################################################
        #   The process_definition code may not really belong here, but so far we have no other
        #   way to preload process definitions. This will probably become part of a set of
        #   process predefinitions later.
        #########################################################################################################
        process_definition = ProcessDefinition(name='ingestion_worker_process', description='Worker transform process for ingestion of datasets')
        process_definition.executable['module'] = 'ion.processes.data.ingestion.ingestion_worker'
        process_definition.executable['class'] = 'IngestionWorker'
        self.process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)
    def _get_process_definition(self, data_process_definition_id=""):
        process_definition_id = ""
        if data_process_definition_id:
            process_definitions, _ = self.clients.resource_registry.find_objects(
                subject=data_process_definition_id, predicate=PRED.hasProcessDefinition, id_only=True
            )
            if process_definitions:
                process_definition_id = process_definitions[0]
            else:
                process_definition = ProcessDefinition()
                process_definition.name = "transform_data_process"
                process_definition.executable["module"] = "ion.processes.data.transforms.transform_prime"
                process_definition.executable["class"] = "TransformPrime"
                process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition)
        else:
            process_definitions, _ = self.clients.resource_registry.find_resources(
                name="transform_data_process", restype=RT.ProcessDefinition, id_only=True
            )
            if process_definitions:
                process_definition_id = process_definitions[0]
            else:
                process_definition = ProcessDefinition()
                process_definition.name = "transform_data_process"
                process_definition.executable["module"] = "ion.processes.data.transforms.transform_prime"
                process_definition.executable["class"] = "TransformPrime"
                process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition)
        return process_definition_id
    def pydap_server(self, process, config):
        pydap_module = config.get_safe('bootstrap.processes.pydap.module', 'ion.processes.data.externalization.lightweight_pydap')
        pydap_class  = config.get_safe('bootstrap.processes.pydap.class', 'LightweightPyDAP')

        use_pydap = config.get_safe('bootstrap.launch_pydap', False)

        process_definition = ProcessDefinition(
                name='pydap_server',
                description='Lightweight WSGI Server for PyDAP')
        process_definition.executable['module'] = pydap_module
        process_definition.executable['class'] = pydap_class

        self._create_and_launch(process_definition, use_pydap)
    def run_external_transform(self):
        '''
        This example script illustrates how a transform can interact with an outside
        process (very basic). It launches an external transform example which uses the
        operating system command 'bc' to add 1 to the input.

        Producer -> A -> 'FS.TEMP/transform_output'
        A is an external transform that spawns an OS process to increment the input by 1
        '''
        pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
        tms_cli = TransformManagementServiceClient(node=self.container.node)
        procd_cli = ProcessDispatcherServiceClient(node=self.container.node)
        
        #-------------------------------
        # Process Definition
        #-------------------------------
        process_definition = ProcessDefinition(name='external_transform_definition')
        process_definition.executable['module'] = 'ion.processes.data.transforms.transform_example'
        process_definition.executable['class'] = 'ExternalTransform'
        process_definition_id = procd_cli.create_process_definition(process_definition=process_definition)

        #-------------------------------
        # Streams
        #-------------------------------

        input_stream_id = pubsub_cli.create_stream(name='input_stream', original=True)
        
        #-------------------------------
        # Subscription
        #-------------------------------

        query = StreamQuery(stream_ids=[input_stream_id])
        input_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='input_queue')

        #-------------------------------
        # Launch Transform
        #-------------------------------

        transform_id = tms_cli.create_transform(name='external_transform', 
              in_subscription_id=input_subscription_id,
              process_definition_id=process_definition_id,
              configuration={})
        tms_cli.activate_transform(transform_id)

        #-------------------------------
        # Launch Producer
        #-------------------------------

        id_p = self.container.spawn_process(
            'myproducer',
            'ion.processes.data.transforms.transform_example',
            'TransformExampleProducer',
            {'process': {'type': 'stream_process',
                         'publish_streams': {'out_stream': input_stream_id}},
             'stream_producer': {'interval': 4000}})
        self.container.proc_manager.procs[id_p].start()
    def _do_launch_gate(self, platform_id, agent_config, timeout_spawn):
        """
        The method for when using the ProcessStateGate pattern, which is the
        one used by test_oms_launch2 to launch the root platform.
        """
        log.debug("_do_launch_gate: platform_id=%r, timeout_spawn=%s",
                  platform_id, timeout_spawn)

        pa_name = 'PlatformAgent_%s' % platform_id

        pdef = ProcessDefinition(name=pa_name)
        pdef.executable = {
            'module': PA_MOD,
            'class': PA_CLS
        }
        pdef_id = self._pd_client.create_process_definition(process_definition=pdef)

        log.debug("using schedule_process directly %r", platform_id)

        pid = self._pd_client.schedule_process(process_definition_id=pdef_id,
                                         schedule=None,
                                         configuration=agent_config)

        if timeout_spawn:
            # ProcessStateGate used as indicated in its pydoc (9/21/12)
            gate = ProcessStateGate(self._pd_client.read_process, pid, ProcessStateEnum.RUNNING)
            err_msg = None
            try:
                if not gate.await(timeout_spawn):
                    err_msg = "The platform agent instance did not spawn in " \
                              "%s seconds.  gate.wait returned false. " % \
                          timeout_spawn
                    log.error(err_msg)

            except Exception as e:
                log.error("Exception while waiting for platform agent instance "
                          "(platform_id=%r) "
                          "to spawn in %s seconds: %s",
                          platform_id, timeout_spawn, str(e))  # exc_info=True

            if err_msg:
                raise PlatformException(err_msg)

        log.debug("_do_launch_gate: platform_id=%r: agent spawned, pid=%r "
                  "(ProcessStateGate pattern used)",
                  platform_id, pid)

        return pid
    def create_logger(self, name, stream_id=""):

        # logger process
        producer_definition = ProcessDefinition(name=name + "_logger")
        producer_definition.executable = {
            "module": "ion.processes.data.stream_granule_logger",
            "class": "StreamGranuleLogger",
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
        configuration = {"process": {"stream_id": stream_id}}
        pid = self.processdispatchclient.schedule_process(
            process_definition_id=logger_procdef_id, configuration=configuration
        )

        return pid
    def post_process_dispatcher(self, config):

        process_definition = ProcessDefinition(
            name='ingestion_worker_process',
            description='Worker transform process for ingestion of datasets')
        process_definition.executable['module'] = 'ion.processes.data.ingestion.ingestion_worker'
        process_definition.executable['class'] = 'IngestionWorker'
        self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)

        self.process_definition = ProcessDefinition(
            name='data_replay_process',
            description='Process for the replay of datasets')
        self.process_definition.executable['module'] = 'ion.processes.data.replay_process'
        self.process_definition.executable['class'] = 'ReplayProcess'
        self.clients.process_dispatcher.create_process_definition(process_definition=self.process_definition)
    def create_process(name='', module='', class_name='', configuration=None):
        '''
        A helper method to create a process
        '''

        producer_definition = ProcessDefinition(name=name)
        producer_definition.executable = {
            'module': module,
            'class': class_name
        }

        process_dispatcher = ProcessDispatcherServiceClient()

        procdef_id = process_dispatcher.create_process_definition(process_definition=producer_definition)
        pid = process_dispatcher.schedule_process(process_definition_id=procdef_id, configuration=configuration)

        return pid
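    # Usage sketch; module and class reuse the stream granule logger seen in
    # create_logger() elsewhere in this file, and the stream id is illustrative:
    #
    #     pid = create_process(
    #         name='granule_logger',
    #         module='ion.processes.data.stream_granule_logger',
    #         class_name='StreamGranuleLogger',
    #         configuration={'process': {'stream_id': 'stream_id_1'}})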
    def ingestion_worker(self, process, config):
        # ingestion
        ingestion_module    = config.get_safe('bootstrap.processes.ingestion.module', 'ion.processes.data.ingestion.science_granule_ingestion_worker')
        ingestion_class     = config.get_safe('bootstrap.processes.ingestion.class', 'ScienceGranuleIngestionWorker')
        ingestion_datastore = config.get_safe('bootstrap.processes.ingestion.datastore_name', 'datasets')
        ingestion_queue     = config.get_safe('bootstrap.processes.ingestion.queue', 'science_granule_ingestion')
        ingestion_workers   = config.get_safe('bootstrap.processes.ingestion.workers', 1)
        #--------------------------------------------------------------------------------
        # Create ingestion workers
        #--------------------------------------------------------------------------------

        process_definition = ProcessDefinition(
            name='ingestion_worker_process',
            description='Worker transform process for ingestion of datasets')
        process_definition.executable['module'] = ingestion_module
        process_definition.executable['class'] = ingestion_class
        ingestion_procdef_id = self.pds_client.create_process_definition(process_definition=process_definition)
    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name + '_logger')
        producer_definition.executable = {
            'module': 'ion.processes.data.stream_granule_logger',
            'class': 'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process': {
                'stream_id': stream_id,
            }
        }
        pid = self.processdispatchclient.schedule_process(process_definition_id=logger_procdef_id, configuration=configuration)

        return pid
    def registration_worker(self, process, config):
        res, meta = self.resource_registry.find_resources(name='registration_worker', restype=RT.ProcessDefinition)
        if len(res):
            return

        registration_module = config.get_safe('bootstrap.processes.registration.module', 'ion.processes.data.registration.registration_process')
        registration_class  = config.get_safe('bootstrap.processes.registration.class', 'RegistrationProcess')
        use_pydap = config.get_safe('bootstrap.use_pydap', False)

        process_definition = ProcessDefinition(
                name='registration_worker',
                description='For registering datasets with ERDDAP')
        process_definition.executable['module'] = registration_module
        process_definition.executable['class']  = registration_class

        self._create_and_launch(process_definition, use_pydap)
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli = DataProductManagementServiceClient()
        self.rrclient = ResourceRegistryServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        datastore_name = CACHE_DATASTORE_NAME
        self.db = self.container.datastore_manager.get_datastore(
            datastore_name)
        self.stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(
            name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space = 'science_granule_ingestion'
        self.exchange_point = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(
            self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2cei.yml')
        self.pd_cli = ProcessDispatcherServiceClient(
            to_name="process_dispatcher")

        self.process_definition_id = uuid4().hex
        self.process_definition = ProcessDefinition(
            name='test',
            executable={
                'module': 'ion.agents.cei.test.test_haagent',
                'class': 'TestProcess'
            })

        self.pd_cli.create_process_definition(self.process_definition,
                                              self.process_definition_id)

        http_port = 8919
        http_port = self._start_webserver(port=http_port)

        self.resource_id = "haagent_4567"
        self._haa_name = "high_availability_agent"
        self._haa_config = {
            'server': {
                'trafficsentinel': {
                    'host': 'localhost',
                    'port': http_port,
                    'protocol': 'http',
                    'username': '******',
                    'password': '******'
                }
            },
            'highavailability': {
                'policy': {
                    'interval': 1,
                    'name': 'sensor',
                    'parameters': {
                        'metric': 'app_attributes:ml',
                        'sample_period': 600,
                        'sample_function': 'Average',
                        'cooldown_period': 5,
                        'scale_up_threshold': 2.0,
                        'scale_up_n_processes': 1,
                        'scale_down_threshold': 1.0,
                        'scale_down_n_processes': 1,
                        'maximum_processes': 5,
                        'minimum_processes': 1,
                    }
                },
                'process_definition_id': self.process_definition_id,
                "process_dispatchers": ['process_dispatcher']
            },
            'agent': {
                'resource_id': self.resource_id
            },
        }

        self._base_procs = self.pd_cli.list_processes()

        self.waiter = ProcessStateWaiter()
        self.waiter.start()

        self.container_client = ContainerAgentClient(node=self.container.node,
                                                     name=self.container.name)
        self._haa_pid = self.container_client.spawn_process(
            name=self._haa_name,
            module="ion.agents.cei.high_availability_agent",
            cls="HighAvailabilityAgent",
            config=self._haa_config)

        # Start a resource agent client to talk with the HA agent.
        self._haa_pyon_client = SimpleResourceAgentClient(
            self.resource_id, process=FakeProcess())
        log.info('Got haa client %s.', str(self._haa_pyon_client))

        self.haa_client = HighAvailabilityAgentClient(self._haa_pyon_client)