Example #1
    def _start_data_subscribers(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        self._raw_samples_received = []
        self._async_sample_result = AsyncResult()
        self._async_raw_sample_result = AsyncResult()

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info('Received parsed data on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._samples_received.append(message)

        from pyon.util.containers import create_unique_identifier

        stream_name = 'ctdpf_parsed'
        parsed_config = self._stream_config[stream_name]
        stream_id = parsed_config['stream_id']
        exchange_name = create_unique_identifier("%s_queue" % stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
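A minimal sketch of how a test might exercise this subscriber by publishing on the same stream. The helper name is hypothetical, but every call it makes appears verbatim elsewhere in this collection:

    def _publish_test_sample(self, msg):
        # Hypothetical helper: publish msg on the ctdpf_parsed stream so the
        # recv_data callback above fires and appends it to _samples_received.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        stream_id = self._stream_config['ctdpf_parsed']['stream_id']
        route = pubsub_client.read_stream_route(stream_id)
        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish(msg)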
Example #2
    def _build_stream_config(self):
        """
        """
        if not self.packet_config:
            return

        streams = self.packet_config
        log.debug("Streams: %s", streams)

        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        dataset_management = DatasetManagementServiceClient()

        # Create streams and subscriptions for each stream named in driver.
        self.stream_config = {}

        for stream_name in streams:
            pd_id = None
            try:
                pd_id = dataset_management.read_parameter_dictionary_by_name(
                    stream_name, id_only=True)
            except Exception:
                log.error("No pd_id found for param_dict '%s'" % stream_name)
                if self.use_default_stream:
                    log.error("using default pd '%s'" % DEFAULT_STREAM_NAME)
                    pd_id = dataset_management.read_parameter_dictionary_by_name(
                        DEFAULT_STREAM_NAME, id_only=True)

            if not pd_id:
                raise IDKException(
                    "Missing parameter dictionary for stream '%s'" %
                    stream_name)

            log.debug("parameter dictionary id: %s" % pd_id)

            stream_def_id = pubsub_client.create_stream_definition(
                name=stream_name, parameter_dictionary_id=pd_id)

            #log.debug("Stream: %s (%s), stream_def_id %s" % (stream_name, type(stream_name), stream_def_id))
            pd = pubsub_client.read_stream_definition(
                stream_def_id).parameter_dictionary
            #log.debug("Parameter Dictionary: %s" % pd)

            try:
                stream_id, stream_route = pubsub_client.create_stream(
                    name=stream_name,
                    exchange_point='science_data',
                    stream_definition_id=stream_def_id)

                stream_config = dict(
                    stream_route=stream_route,
                    routing_key=stream_route.routing_key,
                    exchange_point=stream_route.exchange_point,
                    stream_id=stream_id,
                    stream_definition_ref=stream_def_id,
                    parameter_dictionary=pd)
                self.stream_config[stream_name] = stream_config
                #log.debug("Stream Config (%s): %s" % (stream_name, stream_config))
            except Exception as e:
                log.error("stream publisher exception: %s", e)
Example #3
    def _build_stream_config(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        dataset_management = DatasetManagementServiceClient()

        encoder = IonObjectSerializer()

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}

        stream_name = 'ctdpf_parsed'
        param_dict_name = 'ctdpf_parsed'
        pd_id = dataset_management.read_parameter_dictionary_by_name(param_dict_name, id_only=True)
        stream_def_id = pubsub_client.create_stream_definition(name=stream_name, parameter_dictionary_id=pd_id)
        stream_def = pubsub_client.read_stream_definition(stream_def_id)
        stream_def_dict = encoder.serialize(stream_def)
        pd = stream_def.parameter_dictionary
        stream_id, stream_route = pubsub_client.create_stream(
            name=stream_name,
            exchange_point='science_data',
            stream_definition_id=stream_def_id)
        stream_config = dict(routing_key=stream_route.routing_key,
                             exchange_point=stream_route.exchange_point,
                             stream_id=stream_id,
                             parameter_dictionary=pd,
                             stream_def_dict=stream_def_dict)
        self._stream_config[stream_name] = stream_config
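The stream_def_dict produced above can be turned back into a StreamDefinition object on the receiving side without another service call. A sketch of the inverse operation, assuming pyon's object registry has been bootstrapped:

    from pyon.core.bootstrap import get_obj_registry
    from pyon.core.object import IonObjectDeserializer

    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())
    stream_def = decoder.deserialize(stream_def_dict)
    pd = stream_def.parameter_dictionary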
Example #4
    def load_data_product(self):
        dset_i = 0
        dataset_management      = DatasetManagementServiceClient()
        pubsub_management       = PubsubManagementServiceClient()
        data_product_management = DataProductManagementServiceClient()
        resource_registry       = self.container.instance.resource_registry
        dp_obj = DataProduct(
            name='instrument_data_product_%i' % dset_i,
            description='ctd stream test',
            processing_level_code='Parsed_Canonical')
        pdict_id = dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id = pubsub_management.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(pubsub_management.delete_stream_definition, stream_def_id)
        data_product_id = data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
        self.addCleanup(data_product_management.delete_data_product, data_product_id)
        data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(data_product_management.suspend_data_product_persistence, data_product_id)

        stream_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasStream', id_only=True)
        stream_id = stream_ids[0]
        route = pubsub_management.read_stream_route(stream_id)

        dataset_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasDataset', id_only=True)
        dataset_id = dataset_ids[0]

        return data_product_id, stream_id, route, stream_def_id, dataset_id
Example #5
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
Example #6
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = self.container.resource_registry
Example #7
    def _build_stream_config(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        dataset_management = DatasetManagementServiceClient()

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}

        streams = {
            'parsed' : 'ctd_parsed_param_dict',
            'raw' : 'ctd_raw_param_dict'
        }

        for (stream_name, param_dict_name) in streams.iteritems():
            # Note: every stream is looked up with DEFAULT_PARAM_DICT here,
            # not the per-stream param_dict_name.
            pd_id = dataset_management.read_parameter_dictionary_by_name(DEFAULT_PARAM_DICT, id_only=True)
            if not pd_id:
                log.error("No pd_id found for param_dict '%s'" % DEFAULT_PARAM_DICT)

            stream_def_id = pubsub_client.create_stream_definition(name=stream_name,
                                                                   parameter_dictionary_id=pd_id)
            pd = None
            stream_id, stream_route = pubsub_client.create_stream(name=stream_name,
                                                exchange_point='science_data',
                                                stream_definition_id=stream_def_id)

            stream_config = dict(stream_route=stream_route,
                                 routing_key=stream_route.routing_key,
                                 exchange_point=stream_route.exchange_point,
                                 stream_id=stream_id,
                                 stream_definition_ref=stream_def_id,
                                 parameter_dictionary=pd)

            self._stream_config[stream_name] = stream_config
Example #8
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Instantiate a process to represent the test
        process = TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(
            node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(
            node=self.container.node, process=process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client,
                                  self.addCleanup)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
Example #9
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)

        #setup listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []
Example #10
def build_stream_config(streams):
    """
    """
    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=cc.node)
    dataset_management = DatasetManagementServiceClient() 
    
    # Create streams and subscriptions for each stream named in driver.
    agent_stream_config = {}

    for (stream_name, param_dict_name) in streams.iteritems():
        pd_id = dataset_management.read_parameter_dictionary_by_name(param_dict_name, id_only=True)

        stream_def_id = pubsub_client.create_stream_definition(name=stream_name, parameter_dictionary_id=pd_id)
        pd            = pubsub_client.read_stream_definition(stream_def_id).parameter_dictionary

        stream_id, stream_route = pubsub_client.create_stream(name=stream_name,
                                            exchange_point='science_data',
                                            stream_definition_id=stream_def_id)

        stream_config = dict(stream_route=stream_route,
                             routing_key=stream_route.routing_key,
                             exchange_point=stream_route.exchange_point,
                             stream_id=stream_id,
                             stream_definition_ref=stream_def_id,
                             parameter_dictionary=pd)
        agent_stream_config[stream_name] = stream_config

    return agent_stream_config
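A hypothetical call site for this helper, using the stream-name to parameter-dictionary mapping seen in the other examples; agent_config is assumed to be the agent configuration dict being assembled:

    streams = {
        'parsed': 'ctd_parsed_param_dict',
        'raw': 'ctd_raw_param_dict'
    }
    agent_config['stream_config'] = build_stream_config(streams)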
Example #11
    def load_data_product(self):
        dset_i = 0
        dataset_management      = DatasetManagementServiceClient()
        pubsub_management       = PubsubManagementServiceClient()
        data_product_management = DataProductManagementServiceClient()
        resource_registry       = self.container.instance.resource_registry

        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = DataProduct(
            name='instrument_data_product_%i' % dset_i,
            description='ctd stream test',
            processing_level_code='Parsed_Canonical',
            temporal_domain = tdom,
            spatial_domain = sdom)
        pdict_id = dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id = pubsub_management.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(pubsub_management.delete_stream_definition, stream_def_id)
        data_product_id = data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
        self.addCleanup(data_product_management.delete_data_product, data_product_id)
        data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(data_product_management.suspend_data_product_persistence, data_product_id)

        stream_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasStream', id_only=True)
        stream_id = stream_ids[0]
        route = pubsub_management.read_stream_route(stream_id)

        dataset_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasDataset', id_only=True)
        dataset_id = dataset_ids[0]

        return data_product_id, stream_id, route, stream_def_id, dataset_id
Example #12
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataset_management = self.datasetclient
Example #13
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        #setup listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()
Example #14
def build_stream_config(streams):
    """
    """
    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=cc.node)
    dataset_management = DatasetManagementServiceClient()

    # Create streams and subscriptions for each stream named in driver.
    agent_stream_config = {}

    for (stream_name, param_dict_name) in streams.iteritems():
        pd_id = dataset_management.read_parameter_dictionary_by_name(
            param_dict_name, id_only=True)

        stream_def_id = pubsub_client.create_stream_definition(
            name=stream_name, parameter_dictionary_id=pd_id)
        pd = pubsub_client.read_stream_definition(
            stream_def_id).parameter_dictionary

        stream_id, stream_route = pubsub_client.create_stream(
            name=stream_name,
            exchange_point='science_data',
            stream_definition_id=stream_def_id)

        stream_config = dict(stream_route=stream_route,
                             routing_key=stream_route.routing_key,
                             exchange_point=stream_route.exchange_point,
                             stream_id=stream_id,
                             stream_definition_ref=stream_def_id,
                             parameter_dictionary=pd)
        agent_stream_config[stream_name] = stream_config

    return agent_stream_config
Example #15
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher   = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()

        # create missing data process definition
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dataprocessclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
Example #16
    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(
            node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(
            node=self.container.node)

        self.ctd_stream_def = SBE37_CDM_stream_definition()
Example #17
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(
                    RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)

        self.addCleanup(killAllDataProcesses)
Example #18
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(
            node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        #print 'TestObservatoryManagementServiceIntegration: started services'

        self.event_publisher = EventPublisher()
Example #19
    def setUp(self):
        # Start container
        super(TestActivateRSNVel3DInstrument, self).setUp()
        config = DotDict()

        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
Example #20
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli = DataProductManagementServiceClient()
        self.rrclient = ResourceRegistryServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        datastore_name = CACHE_DATASTORE_NAME
        self.db = self.container.datastore_manager.get_datastore(
            datastore_name)
        self.stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(
            name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module':
            'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space = 'science_granule_ingestion'
        self.exchange_point = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(
            self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)
Example #21
    def publish_rdt_to_data_product(cls, data_product_id, rdt, connection_id='', connection_index=''):
        resource_registry = Container.instance.resource_registry
        pubsub_management = PubsubManagementServiceClient()
        stream_ids, _ = resource_registry.find_objects(data_product_id, 'hasStream', id_only=True)
        stream_id = stream_ids[0]
        route = pubsub_management.read_stream_route(stream_id)
        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish(rdt.to_granule(connection_id=connection_id, connection_index=connection_index))
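A hedged usage sketch: build a small granule with RecordDictionaryTool (assumed to come from coi-services' granule utilities, and assuming the stream definition carries a 'time' parameter) and hand it to the helper above:

    from ion.services.dm.utility.granule import RecordDictionaryTool

    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = [0.0]  # assumes the parameter dictionary defines 'time'
    publish_rdt_to_data_product(data_product_id, rdt)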
Example #22
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.data_retriever    = DataRetrieverServiceClient(node=self.container.node)

        self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)

        # Data async and subscription  TODO: Replace with new subscriber
        self._finished_count = None
        #TODO: Switch to gevent.queue.Queue
        self._async_finished_result = AsyncResult()
        self._finished_events_received = []
        self._finished_event_subscriber = None
        self._start_finished_event_subscriber()
        self.addCleanup(self._stop_finished_event_subscriber)


        self.DVR_CONFIG = {
            'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler',
            'dvr_cls' : 'SlocumDataHandler',
            }

        self._setup_resources()

        self.agent_config = {
            'driver_config' : self.DVR_CONFIG,
            'stream_config' : {},
            'agent'         : {'resource_id': self.EDA_RESOURCE_ID},
            'test_mode' : True
        }

        datasetagent_instance_obj = IonObject(
            RT.ExternalDatasetAgentInstance,
            name='ExternalDatasetAgentInstance1',
            description='external data agent instance',
            handler_module=self.EDA_MOD, handler_class=self.EDA_CLS,
            dataset_driver_config=self.DVR_CONFIG,
            dataset_agent_config=self.agent_config)
        self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(
            external_dataset_agent_instance=datasetagent_instance_obj,
            external_dataset_agent_id=self.datasetagent_id,
            external_dataset_id=self.EDA_RESOURCE_ID)


        #TG: Setup/configure the granule logger to log granules as they're published
        pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id)

        dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id)
        print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj


        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient('datasetagentclient', name=pid,  process=FakeProcess())
        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))
Example #23
class DemuxTransformIntTest(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.pubsub_client = PubsubManagementServiceClient()
        self.queue_cleanup = list()


    def tearDown(self):
        for queue in self.queue_cleanup:
            if isinstance(queue,ExchangeNameQueue):
                queue.delete()
            elif isinstance(queue,basestring):
                xn = self.container.ex_manager.create_xn_queue(queue)
                xn.delete()

    def test_demux(self):
        self.stream0, self.route0 = self.pubsub_client.create_stream('stream0', exchange_point='test')
        self.stream1, self.route1 = self.pubsub_client.create_stream('stream1', exchange_point='main_data')
        self.stream2, self.route2 = self.pubsub_client.create_stream('stream2', exchange_point='alt_data')
        
        self.r_stream1 = gevent.event.Event()
        self.r_stream2 = gevent.event.Event()

        def process(msg, stream_route, stream_id):
            if stream_id == self.stream1:
                self.r_stream1.set()
            elif stream_id == self.stream2:
                self.r_stream2.set()
        
        self.container.spawn_process('demuxer', 'ion.processes.data.transforms.mux', 'DemuxTransform', {'process':{'out_streams':[self.stream1, self.stream2]}}, 'demuxer_pid')
        self.queue_cleanup.append('demuxer_pid') 
        
        sub1 = StandaloneStreamSubscriber('sub1', process)
        sub2 = StandaloneStreamSubscriber('sub2', process)
        sub1.xn.bind(self.route1.routing_key, self.container.ex_manager.create_xp('main_data'))
        sub2.xn.bind(self.route2.routing_key, self.container.ex_manager.create_xp('alt_data'))
        sub1.start()
        sub2.start()

        self.queue_cleanup.append(sub1.xn)
        self.queue_cleanup.append(sub2.xn)
        xn = self.container.ex_manager.create_xn_queue('demuxer_pid')
        xn.bind(self.route0.routing_key, self.container.ex_manager.create_xp(self.route0.exchange_point))
        domino = StandaloneStreamPublisher(self.stream0, self.route0)
        domino.publish('test')

        self.assertTrue(self.r_stream1.wait(2))
        self.assertTrue(self.r_stream2.wait(2))

        self.container.proc_manager.terminate_process('demuxer_pid')
        
        sub1.stop()
        sub2.stop()
Example #24
    def setUp(self):
        self._start_container()

        self._pp = pprint.PrettyPrinter()

        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        self._get_platform_attributes()

        url = OmsTestMixin.start_http_server()
        log.info("TestPlatformInstrument:setup http url %s", url)

        result = self.oms.event.register_event_listener(url)
        log.info(
            "TestPlatformInstrument:setup register_event_listener result %s",
            result)

        #        response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall')
        #        log.info("TestPlatformInstrument:setup get_platform_ports %s", response)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.org_id = self.RR2.create(any_old(RT.Org))
        log.debug("Org created: %s", self.org_id)

        # see _set_receive_timeout
        self._receive_timeout = 177

        self.instrument_device = ''
        self.platform_device = ''
        self.platform_agent_instance_id = ''
        self._pa_client = ''

        def done():
            CIOMSClientFactory.destroy_instance(self.oms)
            event_notifications = OmsTestMixin.stop_http_server()
            log.info("event_notifications = %s" % str(event_notifications))

        self.addCleanup(done)
Example #25
    def _build_stream_config(self):
        """
        """
        if not self.packet_config:
            return

        streams = self.packet_config
        log.debug("Streams: %s", streams)

        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        dataset_management = DatasetManagementServiceClient()

        # Create streams and subscriptions for each stream named in driver.
        self.stream_config = {}

        for stream_name in streams:
            pd_id = None
            try:
                pd_id = dataset_management.read_parameter_dictionary_by_name(stream_name, id_only=True)
            except Exception:
                log.error("No pd_id found for param_dict '%s'" % stream_name)
                if self.use_default_stream:
                    log.error("using default pd '%s'" % DEFAULT_STREAM_NAME)
                    pd_id = dataset_management.read_parameter_dictionary_by_name(DEFAULT_STREAM_NAME, id_only=True)

            if not pd_id:
                raise IDKException("Missing parameter dictionary for stream '%s'" % stream_name)

            log.debug("parameter dictionary id: %s" % pd_id)

            stream_def_id = pubsub_client.create_stream_definition(name=stream_name, parameter_dictionary_id=pd_id)

            # log.debug("Stream: %s (%s), stream_def_id %s" % (stream_name, type(stream_name), stream_def_id))
            pd = pubsub_client.read_stream_definition(stream_def_id).parameter_dictionary
            # log.debug("Parameter Dictionary: %s" % pd)

            try:
                stream_id, stream_route = pubsub_client.create_stream(
                    name=stream_name, exchange_point="science_data", stream_definition_id=stream_def_id
                )

                stream_config = dict(
                    stream_route=stream_route,
                    routing_key=stream_route.routing_key,
                    exchange_point=stream_route.exchange_point,
                    stream_id=stream_id,
                    stream_definition_ref=stream_def_id,
                    parameter_dictionary=pd,
                )
                self.stream_config[stream_name] = stream_config
                # log.debug("Stream Config (%s): %s" % (stream_name, stream_config))
            except Exception as e:
                log.error("stream publisher exception: %s", e)

            log.debug("Stream config setup complete.")
Example #26
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()

        self.pdicts = {}
        self.queue_cleanup = list()
        self.exchange_cleanup = list()
Example #27
    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url(
            'res/deploy/r2deploy.yml')  # Because hey why not?!

        self.dataset_management = DatasetManagementServiceClient()
        self.data_process_management = DataProcessManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
Example #28
def start_ctd_publisher(container):

    pubsubclient = PubsubManagementServiceClient(node=container.node)
    stream_id, route = pubsubclient.create_stream('ctd_publisher', exchange_point='science_data')

    pid = container.spawn_process('ctdpublisher',
                                  'ion.processes.data.sinusoidal_stream_publisher',
                                  'SinusoidalCtdPublisher',
                                  {'process': {'stream_id': stream_id}})

    print 'stream_id=' + stream_id
    print 'pid=' + pid
Example #29
    def stop_data_subscribers(self):
        for subscriber in self.data_subscribers.values():
            pubsub_client = PubsubManagementServiceClient()
            if hasattr(subscriber, 'subscription_id'):
                try:
                    pubsub_client.deactivate_subscription(subscriber.subscription_id)
                    pubsub_client.delete_subscription(subscriber.subscription_id)
                    subscriber.stop()
                except Exception:
                    pass
Example #30
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management = DatasetManagementServiceClient()
        self.pubsub_management  = PubsubManagementServiceClient()

        self.rdt                      = None
        self.data_producer_id         = None
        self.provider_metadata_update = None
        self.event                    = Event()
Example #31
    def _stop_data_subscribers(self):
        for subscriber in self._data_subscribers:
            pubsub_client = PubsubManagementServiceClient()
            if hasattr(subscriber, 'subscription_id'):
                try:
                    pubsub_client.deactivate_subscription(subscriber.subscription_id, timeout=120.6)
                except Exception:
                    pass
                pubsub_client.delete_subscription(subscriber.subscription_id, timeout=120.7)
            subscriber.stop()
Example #32
    def _start_data_subscribers(self, count):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        #self._async_data_result = AsyncResult()

        # Launch an xterm that tails the named pipe so science data can be
        # watched as it arrives.
        subprocess.Popen([
            'xterm', '-T', 'InstrumentScienceData', '-e', 'tail', '-f',
            PIPE_PATH
        ])

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            print 'Received message on %s (%s,%s)' % (
                stream_id, stream_route.exchange_point, stream_route.routing_key)
            log.info('Received message on %s (%s,%s)', stream_id,
                     stream_route.exchange_point, stream_route.routing_key)

            self.pipeData = open(PIPE_PATH, "w", 1)
            self.pipeData.write(str(message))
            self.pipeData.flush()
            self.pipeData.close()

            self._samples_received.append(message)
            #if len(self._samples_received) == count:
            #self._async_data_result.set()

        for (stream_name, stream_config) in self._stream_config.iteritems():

            stream_id = stream_config['stream_id']

            # Create subscriptions for each stream.

            exchange_name = '%s_queue' % stream_name
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, recv_data)
            sub.start()
            self._data_subscribers.append(sub)
            print 'stream_id: %s' % stream_id
            sub_id = pubsub_client.create_subscription(name=exchange_name,
                                                       stream_ids=[stream_id])
            pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Example #33
    def _build_stream_config(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        dataset_management = DatasetManagementServiceClient()
        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}

        streams = {
            'parsed': 'ctd_parsed_param_dict',
            'raw': 'ctd_raw_param_dict'
        }

        for (stream_name, param_dict_name) in streams.iteritems():
            pd_id = dataset_management.read_parameter_dictionary_by_name(
                param_dict_name, id_only=True)

            stream_def_id = pubsub_client.create_stream_definition(
                name=stream_name, parameter_dictionary_id=pd_id)
            pd = pubsub_client.read_stream_definition(
                stream_def_id).parameter_dictionary

            stream_id, stream_route = pubsub_client.create_stream(
                name=stream_name,
                exchange_point='science_data',
                stream_definition_id=stream_def_id)

            stream_config = dict(
                stream_route=stream_route,
                routing_key=stream_route.routing_key,
                exchange_point=stream_route.exchange_point,
                stream_id=stream_id,
                stream_definition_ref=stream_def_id,
                parameter_dictionary=pd)

            if stream_name == 'parsed':

                alarm_type = 'IntervalAlarmDef'
                kwargs = {
                    'name': 'test_sim_warning',
                    'stream_name': 'parsed',
                    'value_id': 'temp',
                    'message': 'Temperature is above test range of 5.0.',
                    'type': StreamAlarmType.WARNING,
                    'upper_bound': 5.0,
                    'upper_rel_op': '<'
                }
                alarm = {'type': alarm_type, 'kwargs': kwargs}
                stream_config['alarms'] = [alarm]

            self._stream_config[stream_name] = stream_config
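A rough reading of the alarm definition above, inferred from its warning message: upper_rel_op '<' together with upper_bound declares the valid region (temp < 5.0), and the WARNING fires when a sample falls outside it. As a sketch:

    def alarm_violated(value, upper_bound=5.0):
        valid = value < upper_bound   # upper_rel_op '<'
        return not valid              # WARNING fires outside the valid range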
Example #34
    def setUp(self):

        # Start container.
        log.info('Starting capability container.')
        self._start_container()

        # Bring up services in a deploy file (no need to message)
        log.info('Starting deploy services.')
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        
        # Create a pubsub client to create streams.
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
Example #35
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        config = dict(op="load",
                      scenario="NOSE",
                      attachments="res/preload/r2_ioc/attachments")
        self.container.spawn_process("Loader",
                                     "ion.processes.bootstrap.ion_loader",
                                     "IONLoader",
                                     config=config)
        self.pubsub = PubsubManagementServiceClient()
        self.dams = DataAcquisitionManagementServiceClient()
Example #36
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.pubsub_management    = PubsubManagementServiceClient()
        self.dataset_management   = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.data_acquisition_management = DataAcquisitionManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.process_dispatch_client = ProcessDispatcherServiceClient(node=self.container.node)
        self.resource_registry       = self.container.resource_registry
        self.context_ids = self.build_param_contexts()
        self.setup_resources()
Example #37
def clean_subscriptions():
    ingestion_management = IngestionManagementServiceClient()
    pubsub = PubsubManagementServiceClient()
    rr = ResourceRegistryServiceClient()
    ingestion_config_ids = ingestion_management.list_ingestion_configurations(id_only=True)
    for ic in ingestion_config_ids:
        subscription_ids, assocs = rr.find_objects(subject=ic, predicate=PRED.hasSubscription, id_only=True)
        for subscription_id, assoc in zip(subscription_ids, assocs):
            rr.delete_association(assoc)
            try:
                pubsub.deactivate_subscription(subscription_id)
            except Exception:
                log.exception("Unable to deactivate subscription: %s", subscription_id)
            pubsub.delete_subscription(subscription_id)
Example #38
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
Example #39
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
Example #40
    def _build_stream_config(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        dataset_management = DatasetManagementServiceClient() 
        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}

        streams = {
            'parsed' : 'ctd_parsed_param_dict',
            'raw'    : 'ctd_raw_param_dict'
        }

        for (stream_name, param_dict_name) in streams.iteritems():
            pd_id = dataset_management.read_parameter_dictionary_by_name(param_dict_name, id_only=True)

            stream_def_id = pubsub_client.create_stream_definition(name=stream_name, parameter_dictionary_id=pd_id)
            pd            = pubsub_client.read_stream_definition(stream_def_id).parameter_dictionary

            stream_id, stream_route = pubsub_client.create_stream(name=stream_name,
                                                exchange_point='science_data',
                                                stream_definition_id=stream_def_id)

            stream_config = dict(stream_route=stream_route,
                                 routing_key=stream_route.routing_key,
                                 exchange_point=stream_route.exchange_point,
                                 stream_id=stream_id,
                                 stream_definition_ref=stream_def_id,
                                 parameter_dictionary=pd)
            
            if stream_name == 'parsed':

                alarm_type = 'IntervalAlarmDef'
                kwargs = {
                    'name' : 'test_sim_warning',
                    'stream_name' : 'parsed',
                    'value_id' : 'temp',
                    'message' : 'Temperature is above test range of 5.0.',
                    'type' : StreamAlarmType.WARNING,
                    'upper_bound' : 5.0,
                    'upper_rel_op' : '<'
                }
                alarm = {'type': alarm_type, 'kwargs': kwargs}
                stream_config['alarms'] = [alarm]
            
            self._stream_config[stream_name] = stream_config
Example #41
    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # Use the network definition provided by RSN OMS directly.
        rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
        self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
        # get serialized version for the configuration:
        self._network_definition_ser = NetworkUtil.serialize_network_definition(
            self._network_definition)
        if log.isEnabledFor(logging.DEBUG):
            log.debug("NetworkDefinition serialization:\n%s",
                      self._network_definition_ser)

        self.platformModel_id = None

        self.all_platforms = {}
        self.agent_streamconfig_map = {}

        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber()

        self._set_up_DataProduct_obj()
        self._set_up_PlatformModel_obj()
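
The serialized definition built here is what travels inside the agent configuration; a minimal round-trip sketch, assuming NetworkUtil.deserialize_network_definition is the counterpart of the serializer used above:

        # Sketch: deserialize and re-serialize; a lossless round trip should
        # reproduce the original serialized text.
        ndef = NetworkUtil.deserialize_network_definition(self._network_definition_ser)
        assert NetworkUtil.serialize_network_definition(ndef) == self._network_definition_ser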
Example #42
    def run_external_transform(self):
        '''
        This example script illustrates how a transform can interact with an
        outside process (very basic). It launches an external_transform example
        which uses the operating system command 'bc' to add 1 to the input.

        Producer -> A -> 'FS.TEMP/transform_output'
        A is an external transform that spawns an OS process to increment the input by 1.
        '''
        pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
        tms_cli = TransformManagementServiceClient(node=self.container.node)
        procd_cli = ProcessDispatcherServiceClient(node=self.container.node)
        
        #-------------------------------
        # Process Definition
        #-------------------------------
        process_definition = ProcessDefinition(name='external_transform_definition')
        process_definition.executable['module'] = 'ion.processes.data.transforms.transform_example'
        process_definition.executable['class'] = 'ExternalTransform'
        process_definition_id = procd_cli.create_process_definition(process_definition=process_definition)

        #-------------------------------
        # Streams
        #-------------------------------

        input_stream_id = pubsub_cli.create_stream(name='input_stream', original=True)
        
        #-------------------------------
        # Subscription
        #-------------------------------

        query = StreamQuery(stream_ids=[input_stream_id])
        input_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='input_queue')

        #-------------------------------
        # Launch Transform
        #-------------------------------

        transform_id = tms_cli.create_transform(name='external_transform', 
              in_subscription_id=input_subscription_id,
              process_definition_id=process_definition_id,
              configuration={})
        tms_cli.activate_transform(transform_id)

        #-------------------------------
        # Launch Producer
        #-------------------------------

        id_p = self.container.spawn_process(
            'myproducer',
            'ion.processes.data.transforms.transform_example',
            'TransformExampleProducer',
            {'process' : {'type' : 'stream_process', 'publish_streams' : {'out_stream' : input_stream_id}},
             'stream_producer' : {'interval' : 4000}})
        self.container.proc_manager.procs[id_p].start()
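
For reference, the 'bc' interaction that the external transform relies on amounts to piping an expression into the OS process; a hypothetical sketch (increment_with_bc is illustrative, not the actual ExternalTransform code):

import subprocess

def increment_with_bc(value):
    # Feed '<value> + 1' to the OS 'bc' calculator and read the answer back.
    proc = subprocess.Popen(['bc'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _ = proc.communicate('%s + 1\n' % value)
    return out.strip()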
Example #43
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)

        self.ctd_stream1_id = self.pubsub_cli.create_stream(name="SampleStream1",
                                                            description="Sample Stream 1 Description")

        self.ctd_stream2_id = self.pubsub_cli.create_stream(name="SampleStream2",
                                                            description="Sample Stream 2 Description")

        # Make a subscription to two input streams
        exchange_name = "a_queue"
        query = StreamQuery([self.ctd_stream1_id, self.ctd_stream2_id])

        self.ctd_subscription_id = self.pubsub_cli.create_subscription(query,
                                                                       exchange_name,
                                                                       "SampleSubscription",
                                                                       "Sample Subscription Description")

        # Make a subscription to all streams on an exchange point
        exchange_name = "another_queue"
        query = ExchangeQuery()

        self.exchange_subscription_id = self.pubsub_cli.create_subscription(query,
            exchange_name,
            "SampleExchangeSubscription",
            "Sample Exchange Subscription Description")


        pid = self.container.spawn_process(name='dummy_process_for_test',
            module='pyon.ion.process',
            cls='SimpleProcess',
            config={})
        dummy_process = self.container.proc_manager.procs[pid]


        # Normally the user does not see or create the publisher; that is part of the container's business.
        # For the test we need to set it up explicitly.
        publisher_registrar = StreamPublisherRegistrar(process=dummy_process, node=self.container.node)

        self.ctd_stream1_publisher = publisher_registrar.create_publisher(stream_id=self.ctd_stream1_id)

        self.ctd_stream2_publisher = publisher_registrar.create_publisher(stream_id=self.ctd_stream2_id)


        # Cheat and use the dummy process as the subscriber's process - I don't think it is used for anything...
        self.stream_subscriber = StreamSubscriberRegistrar(process=dummy_process, node=self.container.node)
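
With both publishers registered, a test can push packets onto either stream; a minimal sketch, assuming the registrar-created publishers expose a publish(message) method (the payloads below are placeholders, real tests publish packets/granules):

        self.ctd_stream1_publisher.publish({'sample': 1})
        self.ctd_stream2_publisher.publish({'sample': 2})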
Example #44
    def _build_stream_config(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        dataset_management = DatasetManagementServiceClient()
        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}

        streams = {"parsed": "ctd_parsed_param_dict", "raw": "ctd_raw_param_dict"}

        for (stream_name, param_dict_name) in streams.iteritems():
            pd_id = dataset_management.read_parameter_dictionary_by_name(param_dict_name, id_only=True)

            stream_def_id = pubsub_client.create_stream_definition(name=stream_name, parameter_dictionary_id=pd_id)
            pd = pubsub_client.read_stream_definition(stream_def_id).parameter_dictionary

            stream_id, stream_route = pubsub_client.create_stream(
                name=stream_name, exchange_point="science_data", stream_definition_id=stream_def_id
            )

            stream_config = dict(
                stream_route=stream_route,
                routing_key=stream_route.routing_key,
                exchange_point=stream_route.exchange_point,
                stream_id=stream_id,
                stream_definition_ref=stream_def_id,
                parameter_dictionary=pd,
            )

            if stream_name == "parsed":

                type = "IntervalAlarmDef"
                kwargs = {
                    "name": "test_sim_warning",
                    "stream_name": "parsed",
                    "value_id": "temp",
                    "message": "Temperature is above test range of 5.0.",
                    "type": StreamAlarmType.WARNING,
                    "upper_bound": 5.0,
                    "upper_rel_op": "<",
                }
                alarm = {}
                alarm["type"] = type
                alarm["kwargs"] = kwargs
                alarms = [alarm]
                stream_config["alarms"] = alarms

            self._stream_config[stream_name] = stream_config
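
For context, the per-stream dicts built here are typically handed to an agent under a 'stream_config' key, as in the platform setUp later in this listing; a minimal fragment sketch (PA_RESOURCE_ID is assumed):

        agent_config = {
            'agent' : {'resource_id': PA_RESOURCE_ID},
            'stream_config' : self._stream_config,
        }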
Example #45
    def _start_data_subscriber(self, config, callback):
        """
        Set up and start a data subscriber.
        """
        exchange_point = config['exchange_point']
        stream_id = config['stream_id']

        sub = StandaloneStreamSubscriber(exchange_point, callback)
        sub.start()
        self._data_subscribers.append(sub)

        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        sub_id = pubsub_client.create_subscription(name=exchange_point, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
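
A typical call site pairs this helper with a config built by _build_stream_config; a minimal sketch (_start_parsed_subscriber and its callback are illustrative, mirroring the recv_data pattern used elsewhere in this listing):

    def _start_parsed_subscriber(self):
        # Sketch: subscribe to the 'parsed' stream and collect samples.
        def recv_data(message, stream_route, stream_id):
            log.info('Received message on %s', stream_id)
            self._samples_received.append(message)

        self._start_data_subscriber(self._stream_config['parsed'], recv_data)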
Example #46
    def clean_subscriptions():
        ingestion_management = IngestionManagementServiceClient()
        pubsub = PubsubManagementServiceClient()
        rr = ResourceRegistryServiceClient()
        ingestion_config_ids = ingestion_management.list_ingestion_configurations(id_only=True)
        for ic in ingestion_config_ids:
            assocs = rr.find_associations(subject=ic, predicate=PRED.hasSubscription, id_only=False)
            for assoc in assocs:
                rr.delete_association(assoc)
                try:
                    pubsub.deactivate_subscription(assoc.o)
                except Exception:
                    # The subscription may already be inactive; still delete it below.
                    pass
                pubsub.delete_subscription(assoc.o)
Example #47
    def setUp(self):
        super(CtdbpTransformsIntTest, self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.pubsub = PubsubManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.data_process_management = DataProcessManagementServiceClient()
        self.dataproduct_management = DataProductManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()

        # This is for the time values inside the packets going into the transform
        self.i = 0
Example #48
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        
        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()
Example #49
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management      = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.pubsub_management       = PubsubManagementServiceClient()
        self.resource_registry       = self.container.resource_registry
Example #50
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        
        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()

        self.egg_url_good = "http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1a-py2.7.egg"
        self.egg_url_bad  = "http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.1a-py2.7.egg"
        self.egg_url_404  = "http://sddevrepo.oceanobservatories.org/releases/completely_made_up_404.egg"
Example #51
    def setUp(self):
        # Start container
        super(TestActivateInstrumentIntegration, self).setUp()
        config = DotDict()
        config.bootstrap.use_es = True

        self._start_container()
        self.addCleanup(TestActivateInstrumentIntegration.es_cleanup)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.usernotificationclient = UserNotificationServiceClient()

        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()
Example #52
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2deploy.yml")

        self._pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        self.PLATFORM_CONFIG = {"platform_id": self.PLATFORM_ID, "driver_config": DVR_CONFIG}

        # Start data subscribers, add stop to cleanup.
        # Define stream_config.
        self._async_data_result = AsyncResult()
        self._data_greenlets = []
        self._stream_config = {}
        self._samples_received = []
        self._data_subscribers = []
        self._start_data_subscribers()
        self.addCleanup(self._stop_data_subscribers)

        self._agent_config = {
            "agent": {"resource_id": PA_RESOURCE_ID},
            "stream_config": self._stream_config,
            # pass platform config here
            "platform_config": self.PLATFORM_CONFIG,
        }

        log.debug("launching with agent_config=%s", str(self._agent_config))

        self._launcher = LauncherFactory.createLauncher()
        self._pid = self._launcher.launch(self.PLATFORM_ID, self._agent_config)

        log.debug("LAUNCHED PLATFORM_ID=%r", self.PLATFORM_ID)

        # Start a resource agent client to talk with the agent.
        self._pa_client = ResourceAgentClient(PA_RESOURCE_ID, process=FakeProcess())
        log.info("Got pa client %s." % str(self._pa_client))
Example #53
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
Example #54
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)

        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []
Example #55
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        # create missing data process definition
        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dsmsclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
Example #56
    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient()
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.RR2  = EnhancedResourceRegistryClient(self.RR)


        # Use the network definition provided by RSN OMS directly.
        rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
        self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
        # get serialized version for the configuration:
        self._network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
        if log.isEnabledFor(logging.TRACE):
            log.trace("NetworkDefinition serialization:\n%s", self._network_definition_ser)


        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber()
Example #57
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher   = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
Example #58
    def setUp(self):
        # set up the container
        self._start_container()

        self.cc = ContainerAgentClient(node=self.container.node,name=self.container.name)

        self.cc.start_rel_from_url('res/deploy/r2deploy.yml')

        self.pubsub_cli = PubsubManagementServiceClient(node=self.cc.node)
        self.tms_cli = TransformManagementServiceClient(node=self.cc.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.cc.node)

        self.input_stream = IonObject(RT.Stream,name='ctd1 output', description='output from a ctd')
        self.input_stream.original = True
        self.input_stream.mimetype = 'hdf'
        self.input_stream_id = self.pubsub_cli.create_stream(self.input_stream)

        self.input_subscription = IonObject(RT.Subscription,name='ctd1 subscription', description='subscribe to this if you want ctd1 data')
        self.input_subscription.query['stream_id'] = self.input_stream_id
        self.input_subscription.exchange_name = 'a queue'
        self.input_subscription_id = self.pubsub_cli.create_subscription(self.input_subscription)

        self.output_stream = IonObject(RT.Stream,name='transform output', description='output from the transform process')
        self.output_stream.original = True
        self.output_stream.mimetype='raw'
        self.output_stream_id = self.pubsub_cli.create_stream(self.output_stream)


        self.process_definition = IonObject(RT.ProcessDefinition,name='transform_process')
        self.process_definition.executable = {'module': 'ion.services.dm.transformation.example.transform_example',
                                              'class':'TransformExample'}
        self.process_definition_id, _= self.rr_cli.create(self.process_definition)
Example #59
    def setUp(self):
        # set up the container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2dm.yml")

        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
        self.tms_cli = TransformManagementServiceClient(node=self.container.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
        self.procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.input_stream_id = self.pubsub_cli.create_stream(name="input_stream", original=True)

        self.input_subscription_id = self.pubsub_cli.create_subscription(
            query=StreamQuery(stream_ids=[self.input_stream_id]),
            exchange_name="transform_input",
            name="input_subscription",
        )

        self.output_stream_id = self.pubsub_cli.create_stream(name="output_stream", original=True)

        self.process_definition = ProcessDefinition(name="basic_transform_definition")
        self.process_definition.executable = {
            "module": "ion.processes.data.transforms.transform_example",
            "class": "TransformExample",
        }
        self.process_definition_id = self.procd_cli.create_process_definition(
            process_definition=self.process_definition
        )
Example #60
    def _start_data_subscribers(self, count):
        """
        Start a subscriber for each stream in self._stream_config and mirror
        received samples into a named pipe for live viewing.
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        #self._async_data_result = AsyncResult()

        # Open an xterm that tails the named pipe so incoming samples can be
        # watched live while the test runs.
        subprocess.Popen(['xterm', '-T', 'InstrumentScienceData', '-e', 'tail', '-f', PIPE_PATH])

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info('Received message on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)

            # Mirror the sample into the named pipe for the xterm viewer.
            self.pipeData = open(PIPE_PATH, "w", 1)
            self.pipeData.write(str(message))
            self.pipeData.flush()
            self.pipeData.close()

            self._samples_received.append(message)
            #if len(self._samples_received) == count:
                #self._async_data_result.set()

        for (stream_name, stream_config) in self._stream_config.iteritems():

            stream_id = stream_config['stream_id']

            # Create a subscription for each stream.
            exchange_name = '%s_queue' % stream_name
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, recv_data)
            sub.start()
            self._data_subscribers.append(sub)
            log.debug('stream_id: %s', stream_id)
            sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
            pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
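
The callback above assumes PIPE_PATH already exists as a named pipe; a hypothetical setup sketch (ensure_pipe is illustrative):

import os
import errno

def ensure_pipe(path):
    # Create the FIFO if it is not there yet; both 'tail -f' in the xterm and
    # the subscriber callback expect it to exist.
    try:
        os.mkfifo(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise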