def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)

        #setup listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []
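A matching tearDown for the listener state above is not part of this example; a minimal sketch, assuming the entries of `_data_greenlets` are gevent greenlets spawned later in the test, might be:

    def tearDown(self):
        # Sketch only: kill any sample-listener greenlets the test spawned so
        # they do not outlive the container (assumes gevent greenlet objects).
        for gl in self._data_greenlets:
            gl.kill()
        self._data_greenlets = []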
Example #2
    def setUp(self):
        # Start container
        super(TestActivateRSNVel3DInstrument, self).setUp()
        config = DotDict()

        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
Example #3
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(
            node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        #print 'TestObservatoryManagementServiceIntegration: started services'

        self.event_publisher = EventPublisher()
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataset_management = self.datasetclient
Example #5
    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(
            node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(
            node=self.container.node)

        self.ctd_stream_def = SBE37_CDM_stream_definition()
Example #6
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(
                    RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)

        self.addCleanup(killAllDataProcesses)
Example #7
    def is_service_available(self, service_name, local_rr_only=False):

        try:
            service_resource = None
            #from pyon.core.bootstrap import container_instance
            from mi.core.bootstrap import container_instance
            from interface.objects import ServiceStateEnum
            # Use container direct RR connection if available, otherwise use messaging to the RR service
            if hasattr(container_instance, 'has_capability') and container_instance.has_capability('RESOURCE_REGISTRY'):
                service_resource, _ = container_instance.resource_registry.find_resources(restype='Service', name=service_name)
            elif not local_rr_only:
                from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
                rr_client = ResourceRegistryServiceClient(container_instance.node)
                service_resource, _ = rr_client.find_resources(restype='Service', name=service_name)
            else:
                log.warn("is_service_available(%s) - No RR connection" % service_name)

            # The service is available only if there is a single RR object for it and it is in one of these states:
            if service_resource and len(service_resource) > 1:
                log.warn("is_service_available(%s) - Found multiple service instances: %s", service_name, service_resource)

            # MM 2013-08-17: Added PENDING, because this means service will be there shortly
            if service_resource and service_resource[0].state in (ServiceStateEnum.READY, ServiceStateEnum.STEADY, ServiceStateEnum.PENDING):
                return True
            elif service_resource:
                log.warn("is_service_available(%s) - Service resource in invalid state", service_resource)

            return False

        except Exception as ex:
            return False
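Service registration is asynchronous, so callers frequently poll this check; a minimal wait helper built on it, with assumed timeout defaults, could look like:

    def wait_for_service(self, service_name, timeout=30, poll_interval=1):
        # Sketch: poll is_service_available() until it succeeds or the deadline
        # passes. The timeout and poll interval are assumed values.
        import time
        deadline = time.time() + timeout
        while time.time() < deadline:
            if self.is_service_available(service_name):
                return True
            time.sleep(poll_interval)
        return False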
    def _update_new_data_check_attachment(cls, res_id, new_content):
        """
        Update the list of data files for the external dataset resource

        @param res_id the ID of the external dataset resource
        @param new_content list of new files found on data host
        @throws InstrumentException if external dataset resource can't be found
        @retval the attachment ID
        """
        rr_cli = ResourceRegistryServiceClient()
        try:
            # Delete any attachments with the "NewDataCheck" keyword
            attachment_objs = rr_cli.find_attachments(resource_id=res_id, include_content=False, id_only=False)
            for attachment_obj in attachment_objs:
                kwds = set(attachment_obj.keywords)
                if 'NewDataCheck' in kwds:
                    log.debug('Delete NewDataCheck attachment: {0}'.format(attachment_obj._id))
                    rr_cli.delete_attachment(attachment_obj._id)
                else:
                    log.debug('Found attachment: {0}'.format(attachment_obj))

            # Create the new attachment
            att = Attachment(name='new_data_check', attachment_type=AttachmentType.ASCII, keywords=['NewDataCheck', ], content_type='text/plain', content=msgpack.packb(new_content))
            att_id = rr_cli.create_attachment(resource_id=res_id, attachment=att)

        except NotFound:
            raise InstrumentException('ExternalDatasetResource \'{0}\' not found'.format(res_id))

        return att_id
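Since the content is serialized with msgpack above, reading the file list back requires unpacking it; a sketch of the inverse helper (the name is hypothetical):

    def _read_new_data_check_attachment(cls, res_id):
        # Sketch: fetch attachment content and unpack the NewDataCheck file list,
        # mirroring the keyword filter used in the update helper above.
        rr_cli = ResourceRegistryServiceClient()
        for att in rr_cli.find_attachments(resource_id=res_id, include_content=True, id_only=False):
            if 'NewDataCheck' in set(att.keywords):
                return msgpack.unpackb(att.content)
        return None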
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher   = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()

        # create missing data process definition
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dataprocessclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
Example #10
def helper_create_highcharts_workflow_def(container):

    from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
    rrclient = ResourceRegistryServiceClient(node=container.node)

    # Check to see if the workflow definition already exists
    workflow_def_ids, _ = rrclient.find_resources(restype=RT.WorkflowDefinition, name='Realtime_HighCharts', id_only=True)

    if len(workflow_def_ids) > 0:
        workflow_def_id = workflow_def_ids[0]
    else:
        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Realtime_HighCharts', description='Convert stream data to HighCharts data')

        #Add a transformation process definition
        procdef_id = helper_create_highcharts_data_process_definition(container)
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        from interface.services.ans.iworkflow_management_service import WorkflowManagementServiceClient
        workflowclient = WorkflowManagementServiceClient(node=container.node)

        workflow_def_id = workflowclient.create_workflow_definition(workflow_def_obj)

    return workflow_def_id
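A typical call site, from an integration test that already holds a started container, might look like this sketch (the test function itself is hypothetical):

def test_highcharts_workflow_def_reuse(container):
    # Sketch: the helper looks the definition up by name first, so calling it
    # twice should hand back the same workflow definition id.
    first_id = helper_create_highcharts_workflow_def(container)
    second_id = helper_create_highcharts_workflow_def(container)
    assert first_id == second_id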
Example #11
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)
Example #12
    def is_service_available(self, service_name, local_rr_only=False):

        try:
            service_resource = None
            from pyon.core.bootstrap import container_instance
            from interface.objects import ServiceStateEnum
            #Use container direct RR connection if available, otherwise use messaging to the RR service
            if hasattr(container_instance,
                       'has_capability') and container_instance.has_capability(
                           'RESOURCE_REGISTRY'):
                service_resource, _ = container_instance.resource_registry.find_resources(
                    restype='Service', name=service_name)
            else:
                if not local_rr_only:
                    from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
                    rr_client = ResourceRegistryServiceClient(
                        container_instance.node)
                    service_resource, _ = rr_client.find_resources(
                        restype='Service', name=service_name)

            #The service is available only if there is a single RR object for it and it is in one of these states:
            if service_resource and (
                    service_resource[0].state == ServiceStateEnum.READY
                    or service_resource[0].state == ServiceStateEnum.STEADY):
                return True

            return False

        except Exception as e:
            return False
Example #13
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        #setup listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()
Example #14
    def is_service_available(self, service_name, local_rr_only=False):

        try:
            service_resource = None
            from pyon.core.bootstrap import container_instance
            from interface.objects import ServiceStateEnum
            #Use container direct RR connection if available, otherwise use messaging to the RR service
            if hasattr(container_instance, 'has_capability') and container_instance.has_capability('RESOURCE_REGISTRY'):
                service_resource, _ = container_instance.resource_registry.find_resources(restype='Service', name=service_name)
            else:
                if not local_rr_only:
                    from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
                    rr_client = ResourceRegistryServiceClient(container_instance.node)
                    service_resource, _ = rr_client.find_resources(restype='Service', name=service_name)

            #The service is available only if there is a single RR object for it and it is in one of these states:
            if service_resource and len(service_resource) > 1:
                log.warn("Found multiple service instances registered under name %s: %s", service_name, service_resource)

            if service_resource and ( service_resource[0].state == ServiceStateEnum.READY or service_resource[0].state == ServiceStateEnum.STEADY ):
                return True
            elif service_resource:
                log.warn("Call to is_service_available() failed although a Service resource exists: %s", service_resource)

            return False

        except Exception as e:
            return False
Example #15
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.unsc = UserNotificationServiceClient(node=self.container.node)
        self.rrc = ResourceRegistryServiceClient(node=self.container.node)
        self.imc = IdentityManagementServiceClient(node=self.container.node)
Example #16
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient()
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Instantiate a process to represent the test
        process = TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(
            node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(
            node=self.container.node, process=process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client,
                                  self.addCleanup)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.ims_cli = IndexManagementServiceClient()
        self.rr_cli = ResourceRegistryServiceClient()
        self.index_name = 'test_index'
    def test_create_event_process(self):
        """
        Test creating an event process
        """
        process_definition = ProcessDefinition(name='test')
        process_definition.definition = ''

        rrc = ResourceRegistryServiceClient(node=self.container.node)
        process_definition_id = rrc.create(process_definition)

        self.mock_rr_client.find_objects = Mock()
        self.mock_rr_client.find_objects.return_value = ['stream_id_1'], 'obj_assoc_1'

#        self.mock_pd_client.schedule_process = Mock()
#        self.mock_pd_client.schedule_process.return_value = 'process_id'

        self.mock_rr_client.create_association = mocksignature(self.mock_rr_client.create_association)

        pid = self.event_management.create_event_process(process_definition_id=process_definition_id,
            event_types=['type_1', 'type_2'],
            sub_types=['subtype_1', 'subtype_2'],
            origins=['or_1', 'or_2'],
            origin_types=['t1', 't2'],
            out_data_products={'conductivity': 'id1'}
        )
Example #21
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli = DataProductManagementServiceClient()
        self.rrclient = ResourceRegistryServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.identcli = IdentityManagementServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        self.stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(
            name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module':
            'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space = 'science_granule_ingestion'
        self.exchange_point = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(
            self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)
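The cleaning_up callback registered above is not shown in this example; given the pid and exchange bookkeeping in this setUp, a plausible sketch, assuming cancel_process and the ex_manager teardown calls, is:

    def cleaning_up(self):
        # Sketch: stop the ingestion workers, then delete the queues and
        # exchange points recorded during setUp (assumed container APIs).
        for pid in self.pids:
            self.process_dispatcher.cancel_process(pid)
        for xn in self.exchange_names:
            self.container.ex_manager.create_xn_queue(xn).delete()
        for xp in self.exchange_points:
            self.container.ex_manager.create_xp(xp).delete()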
class TestContainerExchangeToEms(IonIntegrationTestCase):
    # these tests should auto contact the EMS to do the work
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.ems = ExchangeManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()

        # we want the ex manager to do its thing, but without actual calls to broker
        # just mock out the transport
        self.container.ex_manager._priviledged_transport = Mock(BaseTransport)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_create_xs_talks_to_ems(self):
        self.patch_cfg('pyon.ion.exchange.CFG',
                       container=DotDict(CFG.container,
                                         exchange=DotDict(auto_register=True)))

        xs = self.container.ex_manager.create_xs('house')
        self.addCleanup(xs.delete)

        # should have called EMS and set RR items
        res, _ = self.rr.find_resources(RT.ExchangeSpace, name='house')
        self.assertEquals(res[0].name, 'house')

        # should have tried to call broker as well
        self.assertEquals(
            self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_count, 1)
        self.assertIn(
            'house', self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_args[0][0])

    @patch.dict('pyon.ion.exchange.CFG',
                container=DotDict(CFG.container,
                                  exchange=DotDict(auto_register=False)))
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Test reaches into container, doesn\'t work with CEI')
    def test_create_xs_with_no_flag_only_uses_ex_manager(self):
        self.patch_cfg('pyon.ion.exchange.CFG',
                       container=DotDict(
                           CFG.container,
                           exchange=DotDict(auto_register=False)))

        xs = self.container.ex_manager.create_xs('house')
        self.addCleanup(xs.delete)

        e1, e2 = self.rr.find_resources(RT.ExchangeSpace, name='house')
        self.assertEquals(e1, [])
        self.assertEquals(e2, [])
        self.assertEquals(
            self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_count, 1)
        self.assertIn(
            'house', self.container.ex_manager._priviledged_transport.
            declare_exchange_impl.call_args[0][0])
Example #23
    def setUp(self):

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.resource_registry = ResourceRegistryServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)
Example #24
    def setUp(self):
        super(CatalogManagementIntTest,self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.cms_cli = CatalogManagementServiceClient()
        self.rr_cli  = ResourceRegistryServiceClient()
Example #25
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.data_retriever    = DataRetrieverServiceClient(node=self.container.node)

        self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)

        # Data async and subscription  TODO: Replace with new subscriber
        self._finished_count = None
        #TODO: Switch to gevent.queue.Queue
        self._async_finished_result = AsyncResult()
        self._finished_events_received = []
        self._finished_event_subscriber = None
        self._start_finished_event_subscriber()
        self.addCleanup(self._stop_finished_event_subscriber)


        self.DVR_CONFIG = {
            'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler',
            'dvr_cls' : 'SlocumDataHandler',
            }

        self._setup_resources()

        self.agent_config = {
            'driver_config' : self.DVR_CONFIG,
            'stream_config' : {},
            'agent'         : {'resource_id': self.EDA_RESOURCE_ID},
            'test_mode' : True
        }

        datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance,  name='ExternalDatasetAgentInstance1', description='external data agent instance',
                                              handler_module=self.EDA_MOD, handler_class=self.EDA_CLS,
                                              dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=self.agent_config )
        self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(external_dataset_agent_instance=datasetagent_instance_obj,
                                                                                                 external_dataset_agent_id=self.datasetagent_id, external_dataset_id=self.EDA_RESOURCE_ID)


        #TG: Setup/configure the granule logger to log granules as they're published
        pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id)

        dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id)
        print 'TestBulkIngest: Dataset agent instance obj = ', dataset_agent_instance_obj


        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient('datasetagentclient', name=pid,  process=FakeProcess())
        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))
Example #26
    def setUp(self):
        self._start_container()

        self._pp = pprint.PrettyPrinter()

        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        self._get_platform_attributes()

        url = OmsTestMixin.start_http_server()
        log.info("TestPlatformInstrument:setup http url %s", url)

        result = self.oms.event.register_event_listener(url)
        log.info(
            "TestPlatformInstrument:setup register_event_listener result %s",
            result)

        #        response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall')
        #        log.info("TestPlatformInstrument:setup get_platform_ports %s", response)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.org_id = self.RR2.create(any_old(RT.Org))
        log.debug("Org created: %s", self.org_id)

        # see _set_receive_timeout
        self._receive_timeout = 177

        self.instrument_device = ''
        self.platform_device = ''
        self.platform_agent_instance_id = ''
        self._pa_client = ''

        def done():
            CIOMSClientFactory.destroy_instance(self.oms)
            event_notifications = OmsTestMixin.stop_http_server()
            log.info("event_notifications = %s" % str(event_notifications))

        self.addCleanup(done)
    def _new_dataset(self, stream_id):
        '''
        Adds a new dataset to the internal cache of the ingestion worker
        '''
        rr_client = ResourceRegistryServiceClient()
        datasets, _ = rr_client.find_subjects(subject_type=RT.Dataset, predicate=PRED.hasStream, object=stream_id, id_only=True)
        if datasets:
            return datasets[0]
        return None
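The reverse lookup, from a dataset to its streams, walks the same hasStream association with find_objects; a sketch (the helper name is hypothetical):

    def _streams_for_dataset(self, dataset_id):
        # Sketch: traverse hasStream in the opposite direction of _new_dataset.
        rr_client = ResourceRegistryServiceClient()
        stream_ids, _ = rr_client.find_objects(subject=dataset_id, predicate=PRED.hasStream, id_only=True)
        return stream_ids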
Example #28
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.generic_int_helper_rr = ResourceRegistryServiceClient(
            node=self.container.node)
        self.generic_int_helper_rr2 = EnhancedResourceRegistryClient(
            self.generic_int_helper_rr)
Example #29
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.ingestion_management = IngestionManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.ingest_name = 'basic'
        self.exchange = 'testdata'
class TestIntDataProcessManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        #print 'started container'

        # Establish endpoint with container
        container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
        #print 'got CC client'
        container_client.start_rel_from_url('res/deploy/r2sa.yml')
        
        print 'started services'

        # Now create client to DataProcessManagementService
        self.DPMSclient = DataProcessManagementServiceClient(node=self.container.node)
        self.RRclient = ResourceRegistryServiceClient(node=self.container.node)

    def test_createDataProcess(self):
        # test creating a new data process 
        print 'Creating new data process definition'
        dpd_obj = IonObject(RT.DataProcessDefinition, 
                            name='DPD1', 
                            description='some new dpd',
                            process_source='some_source_reference')
        try:
            dprocd_id = self.DPMSclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex: 
            self.fail("failed to create new data process definition: %s" %ex)
        print 'new dpd_id = ', dprocd_id
        
        print 'Creating new subscription'
        s_obj = IonObject(RT.Subscription, 
                            name='S1', 
                            description='some new subscription')
        try:
            s_id, version = self.RRclient.create(s_obj)
        except BadRequest as ex: 
            self.fail("failed to create new subscription: %s" %ex)
        print 'new s_id = ', s_id
        
        print 'Creating new data product'
        dprod_obj = IonObject(RT.DataProduct,
                            name='DProd1', 
                            description='some new data product')
        try:
            dprod_id, version = self.RRclient.create(dprod_obj)
        except BadRequest as ex: 
            self.fail("failed to create new data product: %s" %ex)
        print 'new dprod_id = ', dprod_id
        
        print 'Creating new data process'
        try:
            dproc_id = self.DPMSclient.create_data_process(dprocd_id, s_id, dprod_id)
        except (BadRequest, NotFound, Conflict) as ex: 
            self.fail("failed to create new data process definition: %s" %ex)
Example #31
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()

        self.pdicts = {}
        self.queue_cleanup = list()
        self.exchange_cleanup = list()
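The queue_cleanup and exchange_cleanup lists initialized above imply a tearDown along these lines (a sketch; the ex_manager handles are assumptions):

    def tearDown(self):
        # Sketch: delete any queues and exchange points the test recorded.
        for queue in self.queue_cleanup:
            self.container.ex_manager.create_xn_queue(queue).delete()
        for exchange in self.exchange_cleanup:
            self.container.ex_manager.create_xp(exchange).delete()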
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
    def setUp(self):
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.resource_registry = ResourceRegistryServiceClient()
        self.identity_management_service = IdentityManagementServiceClient()
        self.org_client = OrgManagementServiceClient()
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    def __init__(self):
        self.ingestion_management = IngestionManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self._paused_streams = []
        self._w_covs = {}
        self._ro_covs = {}

        self._context_managed = False
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.ems = ExchangeManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()

        # we want the ex manager to do its thing, but without actual calls to broker
        # just mock out the transport
        self.container.ex_manager._priviledged_transport = Mock(BaseTransport)
    def _find_new_data_check_attachment(self, res_id):
        rr_cli = ResourceRegistryServiceClient()
        try:
            attachment_objs = rr_cli.find_attachments(resource_id=res_id, include_content=False, id_only=False)
            for attachment_obj in attachment_objs:
                kwds = set(attachment_obj.keywords)
                if 'NewDataCheck' in kwds:
                    log.debug('Found NewDataCheck attachment: {0}'.format(attachment_obj._id))
                    return attachment_obj.content
                else:
                    log.debug('Found attachment: {0}'.format(attachment_obj))
        except NotFound:
            raise InstrumentException('ExternalDatasetResource \'{0}\' not found'.format(res_id))
    def _start_raw_ingestion(self):
        dpsc_cli = DataProductManagementServiceClient()
        rrclient = ResourceRegistryServiceClient()
        RR2 = EnhancedResourceRegistryClient(rrclient)

        # Generic time-series data domain creation
        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom.dump(),
            spatial_domain = sdom.dump())

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------

        log.info("Create data product... raw stream id: %s", self._raw_stream_id)
        dp_id = dpsc_cli.create_data_product(data_product=dp_obj)
        dataset_id = self.create_dataset(self._raw_stream_pdict_id)
        RR2.assign_stream_definition_to_data_product_with_has_stream_definition(self._raw_stream_def_id, dp_id)
        RR2.assign_stream_to_data_product_with_has_stream(self._raw_stream_id, dp_id)
        RR2.assign_dataset_to_data_product_with_has_dataset(dataset_id, dp_id)
        self._raw_dataset_id = dataset_id

        log.info("Create data product...Complete")

        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        # Assert that the data product has an associated stream def at this stage
        stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        self.assertNotEquals(len(stream_ids), 0)

        log.info("Activate data product persistence")
        dpsc_cli.activate_data_product_persistence(dp_id)

        log.info("Read data product")
        dp_obj = dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Created data product %s', dp_obj)
    def clean_subscriptions():
        ingestion_management = IngestionManagementServiceClient()
        pubsub = PubsubManagementServiceClient()
        rr = ResourceRegistryServiceClient()
        ingestion_config_ids = ingestion_management.list_ingestion_configurations(id_only=True)
        for ic in ingestion_config_ids:
            subscription_ids, assocs = rr.find_objects(subject=ic, predicate=PRED.hasSubscription, id_only=True)
            for subscription_id, assoc in zip(subscription_ids, assocs):
                rr.delete_association(assoc)
                try:
                    pubsub.deactivate_subscription(subscription_id)
                except:
                    log.exception("Unable to deactivate subscription: %s", subscription_id)
                pubsub.delete_subscription(subscription_id)
    def clean_subscriptions():
        ingestion_management = IngestionManagementServiceClient()
        pubsub = PubsubManagementServiceClient()
        rr = ResourceRegistryServiceClient()
        ingestion_config_ids = ingestion_management.list_ingestion_configurations(id_only=True)
        for ic in ingestion_config_ids:

            assocs = rr.find_associations(subject=ic, predicate=PRED.hasSubscription, id_only=False)
            for assoc in assocs:
                rr.delete_association(assoc)
                try:
                    pubsub.deactivate_subscription(assoc.o)
                except:
                    pass
                pubsub.delete_subscription(assoc.o)
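Either variant is typically registered from setUp so stale subscriptions never leak across tests, e.g.:

    # Sketch: register the nested cleanup right after defining it.
    self.addCleanup(clean_subscriptions)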
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.unsc = UserNotificationServiceClient(node=self.container.node)
        self.rrc = ResourceRegistryServiceClient(node=self.container.node)
        self.imc = IdentityManagementServiceClient(node=self.container.node)
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        
        #setup listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()

        self.egg_url_good = "http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1a-py2.7.egg"
        self.egg_url_bad  = "http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.1a-py2.7.egg"
        self.egg_url_404  = "http://sddevrepo.oceanobservatories.org/releases/completely_made_up_404.egg"
    def setUp(self):
        super(DataRetrieverIntTestAlpha,self).setUp()


        self._start_container()
        config = DotDict()
        config.bootstrap.processes.ingestion.module = 'ion.processes.data.ingestion.ingestion_worker_a'
        config.bootstrap.processes.replay.module    = 'ion.processes.data.replay.replay_process_a'
        self.container.start_rel_from_url('res/deploy/r2dm.yml', config)


        self.datastore_name = 'test_datasets'
        self.datastore      = self.container.datastore_manager.get_datastore(self.datastore_name, profile=DataStore.DS_PROFILE.SCIDATA)

        self.data_retriever     = DataRetrieverServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.resource_registry  = ResourceRegistryServiceClient()

        xs_dot_xp = CFG.core_xps.science_data

        try:
            self.XS, xp_base = xs_dot_xp.split('.')
            self.XP = '.'.join([get_sys_name(), xp_base])
        except ValueError:
            raise StandardError('Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure' % xs_dot_xp)
Example #44
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.generic_int_helper_rr  = ResourceRegistryServiceClient(node=self.container.node)
        self.generic_int_helper_rr2 = EnhancedResourceRegistryClient(self.generic_int_helper_rr)
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        print 'started services'

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)

        #setup listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []
    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DP   = DataProductManagementServiceClient(node=self.container.node)
        self.PSC  = PubsubManagementServiceClient(node=self.container.node)
        self.PDC  = ProcessDispatcherServiceClient(node=self.container.node)
        self.DSC  = DatasetManagementServiceClient()
        self.IDS  = IdentityManagementServiceClient(node=self.container.node)
        self.RR2  = EnhancedResourceRegistryClient(self.RR)


        # Use the network definition provided by RSN OMS directly.
        rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
        self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
        # get serialized version for the configuration:
        self._network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
        if log.isEnabledFor(logging.TRACE):
            log.trace("NetworkDefinition serialization:\n%s", self._network_definition_ser)


        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber()
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        # create missing data process definition
        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dsmsclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
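
The cleanup above queries the registry with find_resources(restype, lcstate, name, id_only) and takes the first element of the returned tuple as the id list. Under that same assumption about the return shape, a hedged sketch of verifying that the definition created in this setUp is findable:

        dpd_ids, _ = self.rrclient.find_resources(RT.DataProcessDefinition, None, None, True)
        assert len(dpd_ids) >= 1, 'expected the definition created in setUp'
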
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap', 'ion.processes.bootstrap.index_bootstrap', 'IndexBootStrap', op)
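
The same DotDict override pattern appears later in this listing (config.bootstrap.use_es = True in TestActivateInstrumentIntegration): attribute assignment builds the nested config that start_rel_from_url consumes. A minimal sketch, with the shard setting carried over from the CFG tweaks above:

        config = DotDict()
        config.bootstrap.use_es = True           # enable the ElasticSearch bootstrap
        config.server.elasticsearch.shards = 1   # keep the test index small
        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)
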
    def setUp(self):
        # set up the container
        self._start_container()

        self.cc = ContainerAgentClient(node=self.container.node, name=self.container.name)

        self.cc.start_rel_from_url('res/deploy/r2deploy.yml')

        self.pubsub_cli = PubsubManagementServiceClient(node=self.cc.node)
        self.tms_cli = TransformManagementServiceClient(node=self.cc.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.cc.node)

        self.input_stream = IonObject(RT.Stream, name='ctd1 output', description='output from a ctd')
        self.input_stream.original = True
        self.input_stream.mimetype = 'hdf'
        self.input_stream_id = self.pubsub_cli.create_stream(self.input_stream)

        self.input_subscription = IonObject(RT.Subscription, name='ctd1 subscription', description='subscribe to this if you want ctd1 data')
        self.input_subscription.query['stream_id'] = self.input_stream_id
        self.input_subscription.exchange_name = 'a queue'
        self.input_subscription_id = self.pubsub_cli.create_subscription(self.input_subscription)

        self.output_stream = IonObject(RT.Stream, name='transform output', description='output from the transform process')
        self.output_stream.original = True
        self.output_stream.mimetype = 'raw'
        self.output_stream_id = self.pubsub_cli.create_stream(self.output_stream)


        self.process_definition = IonObject(RT.ProcessDefinition, name='transform_process')
        self.process_definition.executable = {'module': 'ion.services.dm.transformation.example.transform_example',
                                              'class': 'TransformExample'}
        self.process_definition_id, _ = self.rr_cli.create(self.process_definition)
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2coi.yml")

        # Now create client to the resource registry service
        self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)
class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.pubsubclient = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.imsclient = InstrumentManagementServiceClient()
        self.dataproductclient = DataProductManagementServiceClient()
        self.dataprocessclient = DataProcessManagementServiceClient()
        self.datasetclient = DatasetManagementServiceClient()
        self.dataset_management = self.datasetclient
        self.process_dispatcher = ProcessDispatcherServiceClient()

    def test_create_data_process_definition(self):
        pfunc = ParameterFunction(name='test_func')
        func_id = self.dataset_management.create_parameter_function(pfunc)

        data_process_definition = DataProcessDefinition()
        data_process_definition.name = 'Simple'

        dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition, func_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dpd_id)
        self.addCleanup(self.dataset_management.delete_parameter_function, func_id)

        objs, _ = self.rrclient.find_objects(dpd_id, PRED.hasParameterFunction, id_only=False)
        self.assertEquals(len(objs), 1)
        self.assertIsInstance(objs[0], ParameterFunction)
Beispiel #54
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        
        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()
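
Only the EventPublisher constructor is shown here. Assuming pyon's EventPublisher.publish_event API (origin plus event fields as keyword arguments; not shown in this listing), a hedged sketch of emitting a test event:

        # illustrative values; event_type and origin are assumptions
        self.event_publisher.publish_event(event_type='ResourceEvent',
                                           origin='test_origin')
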
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2dm.yml")

        self.ims_cli = IndexManagementServiceClient()
        self.rr_cli = ResourceRegistryServiceClient()
        self.index_name = "test_index"
    def setUp(self):
        # set up the container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2dm.yml")

        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
        self.tms_cli = TransformManagementServiceClient(node=self.container.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
        self.procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.input_stream_id = self.pubsub_cli.create_stream(name="input_stream", original=True)

        self.input_subscription_id = self.pubsub_cli.create_subscription(
            query=StreamQuery(stream_ids=[self.input_stream_id]),
            exchange_name="transform_input",
            name="input_subscription",
        )

        self.output_stream_id = self.pubsub_cli.create_stream(name="output_stream", original=True)

        self.process_definition = ProcessDefinition(name="basic_transform_definition")
        self.process_definition.executable = {
            "module": "ion.processes.data.transforms.transform_example",
            "class": "TransformExample",
        }
        self.process_definition_id = self.procd_cli.create_process_definition(
            process_definition=self.process_definition
        )
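
create_process_definition only registers the definition; launching it is a separate step. Assuming the ProcessDispatcher client exposes schedule_process(process_definition_id, configuration), which this listing does not show, a hedged sketch:

        # illustrative configuration; the schedule_process signature is assumed
        config = {'process': {'name': 'basic_transform'}}
        pid = self.procd_cli.schedule_process(
            process_definition_id=self.process_definition_id,
            configuration=config)
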
    def setUp(self):
        # Start container
        super(TestActivateInstrumentIntegration, self).setUp()
        config = DotDict()
        config.bootstrap.use_es = True

        self._start_container()
        self.addCleanup(TestActivateInstrumentIntegration.es_cleanup)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.usernotificationclient = UserNotificationServiceClient()

        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()
    def _on_init(self):
        ResourceAgent._on_init(self)

#        log.debug(">>funcs:\n%s" % dir(self))
#        log.debug(">>type:\n%s" % type(self))
#        log.debug(">>CFG:\n%s" % self.CFG)
#        log.debug(">>_proc_type:\n%s" % self._proc_type)

        ext_dataset_id = self.CFG.get('process',{}).get('eoa',{}).get('dataset_id','')
        log.debug(">>ds_id:\n\t%s" % ext_dataset_id)
        self.resreg_cli = ResourceRegistryServiceClient()
        self.pubsub_cli = PubsubManagementServiceClient()


        # retrieve all the ExternalDataset resource associated resources
        ext_ds_res = self.resreg_cli.read(object_id=ext_dataset_id)
        log.debug("Retrieved ExternalDataset: %s" % ext_ds_res)

        dsrc_res, dsrc_acc = self.resreg_cli.find_objects(subject=ext_dataset_id, predicate=PRED.hasSource, object_type=RT.DataSource)
        dsrc_res = dsrc_res[0]
        dsrc_id = dsrc_acc[0].o
        log.debug("Found associated DataSource: %s" % dsrc_id)

        edp_res, edp_acc = self.resreg_cli.find_objects(subject=dsrc_id, predicate=PRED.hasProvider, object_type=RT.ExternalDataProvider)
        edp_res = edp_res[0]
        edp_id = edp_acc[0].o
        log.debug("Found associated ExternalDataProvider: %s" % edp_id)

        mdl_res, mdl_acc = self.resreg_cli.find_objects(subject=dsrc_id, predicate=PRED.hasModel, object_type=RT.ExternalDataSourceModel)
        mdl_res = mdl_res[0]
        mdl_id = mdl_acc[0].o
        log.debug("Found associated ExternalDataSourceModel: %s" % mdl_id)

        dprod_id, _ = self.resreg_cli.find_objects(subject=ext_dataset_id, predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True)
        dprod_id = dprod_id[0]
        log.debug("Found associated DataProducer: %s" % dprod_id)

        stream_id, _ = self.resreg_cli.find_objects(subject=dprod_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]
        log.debug("Found associated Stream: %s" % stream_id)



        # configure the stream publisher
        log.debug("Configure StreamPublisher")
        stream_route = self.pubsub_cli.read_stream(stream_id=stream_id)
        log.debug("StreamRoute: %s" % stream_route)

        self._stream_publisher = StreamPublisher(stream_route)

        # instantiate the data_handler instance
        modpath = mdl_res.data_handler_module
        log.debug("ExternalDataHandler module: %s" % modpath)
        classname = mdl_res.data_handler_class
        log.debug("ExternalDataHandler class: %s" % classname)
        module = __import__(modpath, fromlist=[classname])
        classobj = getattr(module, classname)
        self._data_handler = classobj(data_provider=edp_res, data_source=dsrc_res, ext_dataset=ext_ds_res)
        assert isinstance(self._data_handler, BaseExternalDataHandler), "Instantiated service not a BaseExternalDataHandler %r" % self._data_handler
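
The dynamic instantiation above is plain stdlib Python: __import__ with a fromlist returns the leaf module, and getattr pulls the class off it. Isolated as a helper (the name is illustrative):

def load_class(modpath, classname):
    # __import__ with a fromlist returns the named submodule itself
    module = __import__(modpath, fromlist=[classname])
    return getattr(module, classname)

# e.g. handler_cls = load_class(mdl_res.data_handler_module,
#                               mdl_res.data_handler_class)
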
    def setUp(self):

        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2coi.yml")

        self.resource_registry = ResourceRegistryServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)