def _start_data_subscribers(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # A callback for processing subscribed-to data.
        def consume_data(message, headers):
            log.info("Subscriber received data message: %s.", str(message))
            self._samples_received.append(message)
            if self._no_samples and self._no_samples == len(self._samples_received):
                self._async_data_result.set()

        # Create a stream subscriber registrar to create subscribers.
        subscriber_registrar = StreamSubscriberRegistrar(process=self.container, node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}
        self._data_subscribers = []
        for (stream_name, val) in PACKET_CONFIG.iteritems():
            stream_def = ctd_stream_definition(stream_id=None)
            stream_def_id = pubsub_client.create_stream_definition(container=stream_def)
            stream_id = pubsub_client.create_stream(
                name=stream_name, stream_definition_id=stream_def_id, original=True, encoding="ION R2"
            )
            self._stream_config[stream_name] = stream_id

            # Create subscriptions for each stream.
            exchange_name = "%s_queue" % stream_name
            sub = subscriber_registrar.create_subscriber(exchange_name=exchange_name, callback=consume_data)
            self._listen(sub)
            self._data_subscribers.append(sub)
            query = StreamQuery(stream_ids=[stream_id])
            sub_id = pubsub_client.create_subscription(query=query, exchange_name=exchange_name)
            pubsub_client.activate_subscription(sub_id)
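
    # A minimal sketch of the _listen() helper used above (its body is not shown
    # in this excerpt), assuming the test class provides unittest-style
    # addCleanup(); the real helper may additionally wait for the subscriber's
    # queue to be declared before returning.
    def _listen(self, sub):
        """Start a data subscriber and ensure it is stopped on cleanup."""
        sub.start()
        self.addCleanup(sub.stop)
        return sub
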
    def _trigger_func(self, stream_id):

        point_def = ctd_stream_definition(stream_id=stream_id)
        point_constructor = PointSupplementConstructor(point_definition=point_def)

        while True:

            length = 1

            c = [random.uniform(0.0, 75.0) for i in xrange(length)]

            t = [random.uniform(-1.7, 21.0) for i in xrange(length)]

            p = [random.lognormvariate(1, 2) for i in xrange(length)]

            lat = [random.uniform(-90.0, 90.0) for i in xrange(length)]

            lon = [random.uniform(0.0, 360.0) for i in xrange(length)]

            tvar = [self.last_time + i for i in xrange(1, length + 1)]

            self.last_time = max(tvar)

            point_id = point_constructor.add_point(time=tvar,location=(lon[0],lat[0]))
            point_constructor.add_point_coverage(point_id=point_id, coverage_id='temperature', values=t)
            point_constructor.add_point_coverage(point_id=point_id, coverage_id='pressure', values=p)
            point_constructor.add_point_coverage(point_id=point_id, coverage_id='conductivity', values=c)

            ctd_packet = point_constructor.get_stream_granule()

            log.info('SimpleCtdPublisher sending %d values!' % length)
            self.publisher.publish(ctd_packet)

            time.sleep(1.0)
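
    # self.publisher used above is assumed to be created elsewhere in this
    # process (e.g. at startup); a minimal sketch following the
    # StreamPublisherRegistrar pattern that appears later in this file, where
    # self.stream_id is a hypothetical attribute naming the stream to publish on.
    def _create_publisher(self):
        publisher_registrar = StreamPublisherRegistrar(process=self,
                                                       node=self.container.node)
        self.publisher = publisher_registrar.create_publisher(stream_id=self.stream_id)
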
    def test_activate_suspend_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        ctd_stream_def = ctd_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def, name='Simulated CTD data')
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product with the CTD stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('test_activate_suspend_data_product: Creating new data product with a stream definition (L4-CI-SA-RQ-308)')

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------
        log.debug("parameter dictionary: %s" % parameter_dictionary)

        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
            stream_definition_id=ctd_stream_def_id,
            parameter_dictionary=parameter_dictionary)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        log.debug('new dp_id = %s' % dp_id)
        log.debug("test_createDataProduct: Data product info from registry %s (L4-CI-SA-RQ-308)", str(dp_obj))

        #------------------------------------------------------------------------------------------------
        # test activate and suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.activate_data_product_persistence(dp_id)

        #------------------------------------------------------------------------------------------------
        # test suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.suspend_data_product_persistence(dp_id)
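
        # A follow-up check one might add here, reusing the resource-registry
        # lookup pattern from test_create_data_product elsewhere in this file,
        # to confirm the product still carries its stream after persistence is
        # suspended (assumes self.rrclient is available as in that sibling test).
        streams, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertTrue(len(streams))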
    def _start_data_subscribers(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # A callback for processing subscribed-to data.
        def consume_data(message, headers):
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            if self._no_samples and self._no_samples == len(self._samples_received):
                self._async_data_result.set()
                
        # Create a stream subscriber registrar to create subscribers.
        subscriber_registrar = StreamSubscriberRegistrar(process=self.container,
                                                container=self.container)

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}
        self._data_subscribers = []
        # TODO the following is a minimal adjustment to at least let the test
        # continue:
#        for (stream_name, val) in PACKET_CONFIG.iteritems():
        for stream_name in PACKET_CONFIG:
            stream_def = ctd_stream_definition(stream_id=None)
            stream_def_id = pubsub_client.create_stream_definition(
                                                    container=stream_def)        
            stream_id = pubsub_client.create_stream(
                        name=stream_name,
                        stream_definition_id=stream_def_id,
                        original=True,
                        encoding='ION R2')

            taxy = get_taxonomy(stream_name)
            stream_config = dict(
                id=stream_id,
                taxonomy=taxy.dump()
            )
            self._stream_config[stream_name] = stream_config
#            self._stream_config[stream_name] = stream_id

            # Create subscriptions for each stream.
            exchange_name = '%s_queue' % stream_name
            sub = subscriber_registrar.create_subscriber(exchange_name=exchange_name,
                                                         callback=consume_data)
            self._listen(sub)
            self._data_subscribers.append(sub)
            query = StreamQuery(stream_ids=[stream_id])
            sub_id = pubsub_client.create_subscription(
                                query=query, exchange_name=exchange_name, exchange_point='science_data')
            pubsub_client.activate_subscription(sub_id)
# Example 5
    def _trigger_func(self, stream_id):

        point_def = ctd_stream_definition(stream_id=stream_id)
        point_constructor = PointSupplementConstructor(
            point_definition=point_def)

        while True:

            length = 1

            c = [random.uniform(0.0, 75.0) for i in xrange(length)]

            t = [random.uniform(-1.7, 21.0) for i in xrange(length)]

            p = [random.lognormvariate(1, 2) for i in xrange(length)]

            lat = [random.uniform(-90.0, 90.0) for i in xrange(length)]

            lon = [random.uniform(0.0, 360.0) for i in xrange(length)]

            tvar = [self.last_time + i for i in xrange(1, length + 1)]

            self.last_time = max(tvar)

            point_id = point_constructor.add_point(time=tvar,
                                                   location=(lon[0], lat[0]))
            point_constructor.add_point_coverage(point_id=point_id,
                                                 coverage_id='temperature',
                                                 values=t)
            point_constructor.add_point_coverage(point_id=point_id,
                                                 coverage_id='pressure',
                                                 values=p)
            point_constructor.add_point_coverage(point_id=point_id,
                                                 coverage_id='conductivity',
                                                 values=c)

            ctd_packet = point_constructor.get_stream_granule()

            log.info('SimpleCtdPublisher sending %d values!' % length)
            self.publisher.publish(ctd_packet)

            time.sleep(2.0)
# Example 6
    def _start_data_subscribers(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # A callback for processing subscribed-to data.
        def consume_data(message, headers):
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            if self._no_samples and self._no_samples == len(self._samples_received):
                self._async_data_result.set()
                
        # Create a stream subscriber registrar to create subscribers.
        subscriber_registrar = StreamSubscriberRegistrar(process=self.container,
                                                node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}
        self._data_subscribers = []
        for (stream_name, val) in PACKET_CONFIG.iteritems():
            stream_def = ctd_stream_definition(stream_id=None)
            stream_def_id = pubsub_client.create_stream_definition(
                                                    container=stream_def)        
            stream_id = pubsub_client.create_stream(
                        name=stream_name,
                        stream_definition_id=stream_def_id,
                        original=True,
                        encoding='ION R2')
            self._stream_config[stream_name] = stream_id
            
            # Create subscriptions for each stream.
            exchange_name = '%s_queue' % stream_name
            sub = subscriber_registrar.create_subscriber(exchange_name=exchange_name,
                                                         callback=consume_data)
            self._listen(sub)
            self._data_subscribers.append(sub)
            query = StreamQuery(stream_ids=[stream_id])
            sub_id = pubsub_client.create_subscription(\
                                query=query, exchange_name=exchange_name)
            pubsub_client.activate_subscription(sub_id)
# Example 7
def instrument_test_driver(container):

    org_client = OrgManagementServiceClient(node=container.node)
    id_client = IdentityManagementServiceClient(node=container.node)

    system_actor = id_client.find_actor_identity_by_name(name=CFG.system.system_actor)
    log.info('system actor:' + system_actor._id)

    sa_header_roles = get_role_message_headers(org_client.find_all_roles_by_user(system_actor._id))


    # Names of agent data streams to be configured.
    parsed_stream_name = 'ctd_parsed'
    raw_stream_name = 'ctd_raw'

    # Driver configuration.
    #Simulator

    driver_config = {
        'svr_addr': 'localhost',
        'cmd_port': 5556,
        'evt_port': 5557,
        'dvr_mod': 'ion.agents.instrument.drivers.sbe37.sbe37_driver',
        'dvr_cls': 'SBE37Driver',
        'comms_config': {
            SBE37Channel.CTD: {
                'method':'ethernet',
                'device_addr': CFG.device.sbe37.host,
                'device_port': CFG.device.sbe37.port,
                'server_addr': 'localhost',
                'server_port': 8888
            }
        }
    }

    #Hardware

    _container_client = ContainerAgentClient(node=container.node,
        name=container.name)

    # Create a pubsub client to create streams.
    _pubsub_client = PubsubManagementServiceClient(node=container.node)

    # A callback for processing subscribed-to data.
    def consume(message, headers):
        log.info('Subscriber received message: %s', str(message))

    # Create a stream subscriber registrar to create subscribers.
    subscriber_registrar = StreamSubscriberRegistrar(process=container,
        node=container.node)

    subs = []

    # Create streams for each stream named in driver.
    stream_config = {}
    for (stream_name, val) in PACKET_CONFIG.iteritems():
        stream_def = ctd_stream_definition(stream_id=None)
        stream_def_id = _pubsub_client.create_stream_definition(
            container=stream_def)
        stream_id = _pubsub_client.create_stream(
            name=stream_name,
            stream_definition_id=stream_def_id,
            original=True,
            encoding='ION R2', headers={'ion-actor-id': system_actor._id, 'ion-actor-roles': sa_header_roles })
        stream_config[stream_name] = stream_id

        # Create subscriptions for each stream.
        exchange_name = '%s_queue' % stream_name
        sub = subscriber_registrar.create_subscriber(exchange_name=exchange_name, callback=consume)
        sub.start()
        query = StreamQuery(stream_ids=[stream_id])
        sub_id = _pubsub_client.create_subscription(\
            query=query, exchange_name=exchange_name )
        _pubsub_client.activate_subscription(sub_id)
        subs.append(sub)


    # Create agent config.

    agent_resource_id = '123xyz'

    agent_config = {
        'driver_config' : driver_config,
        'stream_config' : stream_config,
        'agent'         : {'resource_id': agent_resource_id}
    }

    # Launch an instrument agent process.
    _ia_name = 'agent007'
    _ia_mod = 'ion.agents.instrument.instrument_agent'
    _ia_class = 'InstrumentAgent'
    _ia_pid = _container_client.spawn_process(name=_ia_name,
        module=_ia_mod, cls=_ia_class,
        config=agent_config)


    log.info('got pid=%s for resource_id=%s' % (str(_ia_pid), str(agent_resource_id)))
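
    # A hedged sketch of how the freshly spawned agent could then be commanded
    # from this container, following the ResourceAgentClient pattern used in the
    # setUp() examples below (FakeProcess is assumed to be available here as the
    # local process stand-in those examples use).
    ia_client = ResourceAgentClient(agent_resource_id, process=FakeProcess())
    log.info('got ia client %s', str(ia_client))
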
    def setUp(self):
        """
        Set up the test environment to exercise use of the instrument agent, including:
        * define driver_config parameters.
        * create container with required services and container client.
        * create publication stream ids for each driver data stream.
        * create stream_config parameters.
        * create and activate subscriptions for agent data streams.
        * spawn instrument agent process and create agent client.
        * add cleanup functions to cause subscribers to get stopped.
        """

        #       params = { ('CTD', 'TA2'): -1.9434316e-05,
        #       ('CTD', 'PTCA1'): 1.3206866,
        #       ('CTD', 'TCALDATE'): [8, 11, 2006] }

        #       for tup in params:
        #           print tup

        self.addCleanup(self.customCleanUp)
        # Names of agent data streams to be configured.
        parsed_stream_name = 'ctd_parsed'
        raw_stream_name = 'ctd_raw'

        # Driver configuration.
        #Simulator

        self.driver_config = {
            'svr_addr': 'localhost',
            'cmd_port': 5556,
            'evt_port': 5557,
            'dvr_mod': 'ion.agents.instrument.drivers.sbe37.sbe37_driver',
            'dvr_cls': 'SBE37Driver',
            'comms_config': {
                SBE37Channel.CTD: {
                    'method': 'ethernet',
                    'device_addr': CFG.device.sbe37.host,
                    'device_port': CFG.device.sbe37.port,
                    'server_addr': 'localhost',
                    'server_port': 8888
                }
            }
        }

        #Hardware
        '''
        self.driver_config = {
            'svr_addr': 'localhost',
            'cmd_port': 5556,
            'evt_port': 5557,
            'dvr_mod': 'ion.agents.instrument.drivers.sbe37.sbe37_driver',
            'dvr_cls': 'SBE37Driver',
            'comms_config': {
                SBE37Channel.CTD: {
                    'method':'ethernet',
                    'device_addr': '137.110.112.119',
                    'device_port': 4001,
                    'server_addr': 'localhost',
                    'server_port': 8888
                }
            }
        }
        '''

        # Start container.
        self._start_container()

        # Establish endpoint with container (used in tests below)
        self._container_client = ContainerAgentClient(node=self.container.node,
                                                      name=self.container.name)

        # Bring up services in a deploy file (no need to message)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        # Create a pubsub client to create streams.
        self._pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)

        # A callback for processing subscribed-to data.
        def consume(message, headers):
            log.info('Subscriber received message: %s', str(message))

        # Create a stream subscriber registrar to create subscribers.
        subscriber_registrar = StreamSubscriberRegistrar(
            process=self.container, node=self.container.node)

        self.subs = []

        # Create streams for each stream named in driver.
        self.stream_config = {}
        for (stream_name, val) in PACKET_CONFIG.iteritems():
            stream_def = ctd_stream_definition(stream_id=None)
            stream_def_id = self._pubsub_client.create_stream_definition(
                container=stream_def)
            stream_id = self._pubsub_client.create_stream(
                name=stream_name,
                stream_definition_id=stream_def_id,
                original=True,
                encoding='ION R2')
            self.stream_config[stream_name] = stream_id

            # Create subscriptions for each stream.
            exchange_name = '%s_queue' % stream_name
            sub = subscriber_registrar.create_subscriber(
                exchange_name=exchange_name, callback=consume)
            sub.start()
            query = StreamQuery(stream_ids=[stream_id])
            sub_id = self._pubsub_client.create_subscription(\
                                query=query, exchange_name=exchange_name)
            self._pubsub_client.activate_subscription(sub_id)
            self.subs.append(sub)

        # Add cleanup function to stop subscribers.
        def stop_subscriber(sub_list):
            for sub in sub_list:
                sub.stop()

        self.addCleanup(stop_subscriber, self.subs)

        # Create agent config.

        self.agent_resource_id = '123xyz'

        self.agent_config = {
            'driver_config': self.driver_config,
            'stream_config': self.stream_config,
            'agent': {
                'resource_id': self.agent_resource_id
            }
        }

        # Launch an instrument agent process.
        self._ia_name = 'agent007'
        self._ia_mod = 'ion.agents.instrument.instrument_agent'
        self._ia_class = 'InstrumentAgent'
        self._ia_pid = self._container_client.spawn_process(
            name=self._ia_name,
            module=self._ia_mod,
            cls=self._ia_class,
            config=self.agent_config)

        log.info('got pid=%s', str(self._ia_pid))

        self._ia_client = None
        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(self.agent_resource_id,
                                              process=FakeProcess())
        log.info('got ia client %s', str(self._ia_client))
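
    # customCleanUp is registered via addCleanup() at the top of setUp() but its
    # body is not part of this excerpt; a minimal sketch, assuming the container
    # client exposes a terminate_process() counterpart to spawn_process() and
    # that stopping the agent process is the only extra teardown required.
    def customCleanUp(self):
        if self._ia_pid:
            self._container_client.terminate_process(self._ia_pid)
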
    def test_createDataProduct(self):
        client = self.client

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = ctd_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def, name='Simulated CTD data')

        # test creating a new data product w/o a stream definition
        print 'test_createDataProduct: Creating new data product w/o a stream definition (L4-CI-SA-RQ-308)'
        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')
        try:
            dp_id = client.create_data_product(dp_obj, '')
            dp_obj = client.read_data_product(dp_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" %ex)
        print 'new dp_id = ', dp_id
        log.debug("test_createDataProduct: Data product info from registry %s (L4-CI-SA-RQ-308)", str(dp_obj))


        # test creating a new data product with  a stream definition
        print 'Creating new data product with a stream definition'
        dp_obj = IonObject(RT.DataProduct,
                           name='DP2',
                           description='some new dp')
        try:
            dp_id2 = client.create_data_product(dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" %ex)
        print 'new dp_id = ', dp_id2

        # test activate and suspend data product persistence
        try:
            client.activate_data_product_persistence(dp_id2, persist_data=True, persist_metadata=True)
            time.sleep(3)
            client.suspend_data_product_persistence(dp_id2)
        except BadRequest as ex:
            self.fail("failed to activate / deactivate data product persistence : %s" %ex)




        pid = self.container.spawn_process(name='dummy_process_for_test',
            module='pyon.ion.process',
            cls='SimpleProcess',
            config={})
        dummy_process = self.container.proc_manager.procs[pid]
        '''
        publisher_registrar = StreamPublisherRegistrar(process=dummy_process, node=self.container.node)
        self.ctd_stream1_publisher = publisher_registrar.create_publisher(stream_id=self.in_stream_id)

        msg = {'num':'3'}
        self.ctd_stream1_publisher.publish(msg)

        time.sleep(1)

        msg = {'num':'5'}
        self.ctd_stream1_publisher.publish(msg)

        time.sleep(1)

        msg = {'num':'9'}
        self.ctd_stream1_publisher.publish(msg)
        '''

        # test creating a duplicate data product
        print 'Creating the same data product a second time (duplicate)'
        dp_obj.description = 'the first dp'
        try:
            dp_id = client.create_data_product(dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            print ex
        else:
            self.fail("duplicate data product was created with the same name")


        # test reading a non-existent data product
        print 'reading non-existent data product'
        try:
            dp_obj = client.read_data_product('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data product was found during read: %s" %dp_obj)

        # update a data product (tests read also)
        print 'Updating data product'
        # first get the existing dp object
        try:
            dp_obj = client.read_data_product(dp_id)
        except NotFound as ex:
            self.fail("existing data product was not found during read")
        else:
            pass
            #print 'dp_obj = ', dp_obj
        # now tweak the object
        dp_obj.description = 'the very first dp'
        # now write the dp back to the registry
        try:
            update_result = client.update_data_product(dp_obj)
        except NotFound as ex:
            self.fail("existing data product was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data product update")

        # now get the dp back to see if it was updated
        try:
            dp_obj = client.read_data_product(dp_id)
        except NotFound as ex:
            self.fail("existing data product was not found during read")
        else:
            pass
            #print 'dp_obj = ', dp_obj
        self.assertTrue(dp_obj.description == 'the very first dp')

        # now 'delete' the data product
        print "deleting data product: ", dp_id
        try:
            client.delete_data_product(dp_id)
        except NotFound as ex:
            self.fail("existing data product was not found during delete")

        # now try to get the deleted dp object
        try:
            dp_obj = client.read_data_product(dp_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data product was found during read")

        # now try to delete the already deleted dp object
        print "deleting non-existing data product"
        try:
            client.delete_data_product(dp_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data product was found during delete")
    def setUp(self):
        """
        Set up the test environment to exercise use of the instrument agent, including:
        * define driver_config parameters.
        * create container with required services and container client.
        * create publication stream ids for each driver data stream.
        * create stream_config parameters.
        * create and activate subscriptions for agent data streams.
        * spawn instrument agent process and create agent client.
        * add cleanup functions to cause subscribers to get stopped.
        """


 #       params = { ('CTD', 'TA2'): -1.9434316e-05,
 #       ('CTD', 'PTCA1'): 1.3206866,
 #       ('CTD', 'TCALDATE'): [8, 11, 2006] }

 #       for tup in params:
 #           print tup




        self.addCleanup(self.customCleanUp)
        # Names of agent data streams to be configured.
        parsed_stream_name = 'ctd_parsed'        
        raw_stream_name = 'ctd_raw'        

        # Driver configuration.
        #Simulator

        self.driver_config = {
            'svr_addr': 'localhost',
            'cmd_port': 5556,
            'evt_port': 5557,
            'dvr_mod': 'ion.services.mi.drivers.sbe37.sbe37_driver',
            'dvr_cls': 'SBE37Driver',
            'comms_config': {
                SBE37Channel.CTD: {
                    'method':'ethernet',
                    'device_addr': CFG.device.sbe37.host,
                    'device_port': CFG.device.sbe37.port,
                    'server_addr': 'localhost',
                    'server_port': 8888
                }                
            }
        }

        #Hardware

        '''
        self.driver_config = {
            'svr_addr': 'localhost',
            'cmd_port': 5556,
            'evt_port': 5557,
            'dvr_mod': 'ion.services.mi.drivers.sbe37.sbe37_driver',
            'dvr_cls': 'SBE37Driver',
            'comms_config': {
                SBE37Channel.CTD: {
                    'method':'ethernet',
                    'device_addr': '137.110.112.119',
                    'device_port': 4001,
                    'server_addr': 'localhost',
                    'server_port': 8888
                }
            }
        }
        '''

        # Start container.
        self._start_container()

        # Establish endpoint with container (used in tests below)
        self._container_client = ContainerAgentClient(node=self.container.node,
                                                      name=self.container.name)
        
        # Bring up services in a deploy file (no need to message)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        # Create a pubsub client to create streams.
        self._pubsub_client = PubsubManagementServiceClient(
                                                    node=self.container.node)

        # A callback for processing subscribed-to data.
        def consume(message, headers):
            log.info('Subscriber received message: %s', str(message))

        # Create a stream subscriber registrar to create subscribers.
        subscriber_registrar = StreamSubscriberRegistrar(process=self.container,
                                                         container=self.container)

        self.subs = []

        # Create streams for each stream named in driver.
        self.stream_config = {}
        for (stream_name, val) in PACKET_CONFIG.iteritems():
            stream_def = ctd_stream_definition(stream_id=None)
            stream_def_id = self._pubsub_client.create_stream_definition(
                                                    container=stream_def)        
            stream_id = self._pubsub_client.create_stream(
                        name=stream_name,
                        stream_definition_id=stream_def_id,
                        original=True,
                        encoding='ION R2')
            self.stream_config[stream_name] = stream_id
            
            # Create subscriptions for each stream.
            exchange_name = '%s_queue' % stream_name
            sub = subscriber_registrar.create_subscriber(exchange_name=exchange_name, callback=consume)
            sub.start()
            query = StreamQuery(stream_ids=[stream_id])
            sub_id = self._pubsub_client.create_subscription(\
                                query=query, exchange_name=exchange_name)
            self._pubsub_client.activate_subscription(sub_id)
            self.subs.append(sub)
            
        # Add cleanup function to stop subscribers.        
        def stop_subscriber(sub_list):
            for sub in sub_list:
                sub.stop()            
        self.addCleanup(stop_subscriber, self.subs)            
            

        # Create agent config.

        self.agent_resource_id = '123xyz'

        self.agent_config = {
            'driver_config' : self.driver_config,
            'stream_config' : self.stream_config,
            'agent'         : {'resource_id': self.agent_resource_id}
        }

        # Launch an instrument agent process.
        self._ia_name = 'agent007'
        self._ia_mod = 'ion.services.mi.instrument_agent'
        self._ia_class = 'InstrumentAgent'
        self._ia_pid = self._container_client.spawn_process(name=self._ia_name,
                                       module=self._ia_mod, cls=self._ia_class,
                                       config=self.agent_config)


        log.info('got pid=%s', str(self._ia_pid))


        self._ia_client = None
        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(self.agent_resource_id, process=FakeProcess())
        log.info('got ia client %s', str(self._ia_client))
# Example 11
from nose.plugins.attrib import attr
from mi.instrument.seabird.sbe26plus.test.test_driver import SBE26PlusUnitFromIDK
from mi.instrument.seabird.sbe26plus.test.test_driver import SBE26PlusIntFromIDK
from mi.instrument.seabird.sbe26plus.test.test_driver import SBE26PlusQualFromIDK
from mi.idk.unit_test import InstrumentDriverTestCase
from mi.instrument.seabird.sbe26plus.ooicore.driver import PACKET_CONFIG
from prototype.sci_data.stream_defs import ctd_stream_definition

InstrumentDriverTestCase.initialize(
    driver_module='mi.instrument.seabird.sbe26plus.ooicore.driver',
    driver_class="InstrumentDriver",

    instrument_agent_resource_id = '123xyz',
    instrument_agent_name = 'seabird_sbe26plus_ooicore',
    instrument_agent_packet_config = PACKET_CONFIG,
    instrument_agent_stream_definition = ctd_stream_definition(stream_id=None)
)

###############################################################################
#                                UNIT TESTS                                   #
#         Unit tests test the method calls and parameters using Mock.         #
###############################################################################
@attr('UNIT', group='mi')
class UnitFromIDK(SBE26PlusUnitFromIDK):
    """

    """

###############################################################################
#                            INTEGRATION TESTS                                #
#     Integration tests test the direct driver / instrument interaction       #
###############################################################################
    def test_create_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        ctd_stream_def = ctd_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def, name='Simulated CTD data')
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product with the CTD stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('test_create_data_product: Creating new data product with a stream definition (L4-CI-SA-RQ-308)')

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------
        log.debug("parameter dictionary: %s" % parameter_dictionary)

        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                                  stream_definition_id=ctd_stream_def_id,
                                                  parameter_dictionary=parameter_dictionary)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        log.debug('new dp_id = %s' % dp_id)
        log.debug("test_createDataProduct: Data product info from registry %s (L4-CI-SA-RQ-308)", str(dp_obj))


        #------------------------------------------------------------------------------------------------
        # test creating a new data product with  a stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('Creating new data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
            name='DP2',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
        log.debug('new dp_id = %s' % dp_id2)

        #------------------------------------------------------------------------------------------------
        #make sure data product is associated with stream def
        #------------------------------------------------------------------------------------------------
        streamdefs = []
        streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
        for s in streams:
            log.debug("Checking stream %s" % s)
            sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
            for sd in sdefs:
                log.debug("Checking streamdef %s" % sd)
                streamdefs.append(sd)
        self.assertIn(ctd_stream_def_id, streamdefs)


        # test reading a non-existent data product
        log.debug('reading non-existent data product')

        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

        # update a data product (tests read also)
        log.debug('Updating data product')
        # first get the existing dp object
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # now tweak the object
        dp_obj.description = 'the very first dp'
        # now write the dp back to the registry
        update_result = self.dpsc_cli.update_data_product(dp_obj)

        # now get the dp back to see if it was updated
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertEquals(dp_obj.description,'the very first dp')

        # now 'delete' the data product
        log.debug("deleting data product: %s" % dp_id)
        self.dpsc_cli.delete_data_product(dp_id)
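
        # One could also confirm the delete here, reusing the NotFound assertion
        # style from earlier in this test (this assumes delete_data_product makes
        # subsequent reads raise NotFound, as the older create/delete example in
        # this file expects).
        with self.assertRaises(NotFound):
            self.dpsc_cli.read_data_product(dp_id)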
# Example 13
    def test_observatory_structure(self):
        """

        """
        c = self.client

        c2 = DotDict()
        c2.resource_registry = self.client.RR

        instrument_site_impl    = InstrumentSiteImpl(c2)
        platform_site_impl      = PlatformSiteImpl(c2)
        platform_agent_impl     = PlatformAgentImpl(c2)
        instrument_device_impl  = InstrumentDeviceImpl(c2)
        sensor_device_impl      = SensorDeviceImpl(c2)

        #generate a function that finds direct associations, using the more complex one in the service
        def gen_find_oms_association(output_type):
            def freeze():
                def finder_fun(obj_id):
                    ret = c.OMS.find_related_frames_of_reference(obj_id, [output_type])
                    return ret[output_type]
                return finder_fun
            
            return freeze()


        #resource_ids = self._low_level_init()


        ###############################################
        #
        # Assumptions or Order of Events for R2 Preloaded resources
        #
        # - orgs
        # - sites
        # - models
        # - agents
        # - devices
        # - instances
        # - attachments
        #
        ###############################################


        ###############################################
        #
        # orgs
        #
        ###############################################


        ###############################################
        #
        # sites
        #
        ###############################################

        log.info("Create an observatory")
        observatory_id = self.generic_fcruf_script(RT.Observatory, 
                                          "observatory", 
                                          self.client.OMS, 
                                          True)

        log.info("Create a subsite")
        subsite_id = self.generic_fcruf_script(RT.Subsite,
                                            "subsite",
                                            self.client.OMS,
                                            True)

        log.info("Create a platform site")
        platform_site_id = self.generic_fcruf_script(RT.PlatformSite,
                                                     "platform_site",
                                                     self.client.OMS,
                                                     True)
        
        log.info("Create instrument site")
        instrument_site_id = self.generic_fcruf_script(RT.InstrumentSite,
                                                       "instrument_site",
                                                       self.client.OMS,
                                                       True)
        
        ###############################################
        #
        # models
        #
        ###############################################

        log.info("Create a platform model")
        platform_model_id = self.generic_fcruf_script(RT.PlatformModel, 
                                                     "platform_model", 
                                                     self.client.IMS, 
                                                     True)

        log.info("Create instrument model")
        instrument_model_id = self.generic_fcruf_script(RT.InstrumentModel, 
                                                        "instrument_model", 
                                                        self.client.IMS, 
                                                        True)

        log.info("Create sensor model")
        sensor_model_id = self.generic_fcruf_script(RT.SensorModel, 
                                                        "sensor_model", 
                                                        self.client.IMS, 
                                                        True)


        ###############################################
        #
        # agents
        #
        ###############################################

        log.info("Create platform agent")
        platform_agent_id = self.generic_fcruf_script(RT.PlatformAgent, 
                                                      "platform_agent", 
                                                      self.client.IMS, 
                                                      False)
        
        log.info("Create instrument agent")
        instrument_agent_id = self.generic_fcruf_script(RT.InstrumentAgent, 
                                                        "instrument_agent", 
                                                        self.client.IMS, 
                                                        False)


        ###############################################
        #
        # devices
        #
        ###############################################

        log.info("Create a platform device")
        platform_device_id = self.generic_fcruf_script(RT.PlatformDevice, 
                                                    "platform_device", 
                                                    self.client.IMS, 
                                                    False)
        log.info("Create an instrument device")
        instrument_device_id = self.generic_fcruf_script(RT.InstrumentDevice, 
                                                         "instrument_device", 
                                                         self.client.IMS, 
                                                         False)

        log.info("Create a sensor device")
        sensor_device_id = self.generic_fcruf_script(RT.SensorDevice, 
                                                         "sensor_device", 
                                                         self.client.IMS, 
                                                         False)




        ###############################################
        #
        # instances
        #
        ###############################################




        ###############################################
        #
        #
        # attachments and LCS stuff
        #
        #
        ###############################################
        
        #----------------------------------------------
        #
        # orgs
        #
        #----------------------------------------------
        
        #----------------------------------------------
        #
        # sites
        #
        #----------------------------------------------

        log.info("Associate subsite with observatory")
        self.generic_association_script(c.OMS.assign_site_to_site,
                                        gen_find_oms_association(RT.Observatory),
                                        gen_find_oms_association(RT.Subsite),
                                        observatory_id,
                                        subsite_id)

        log.info("Associate platform site with subsite")
        self.generic_association_script(c.OMS.assign_site_to_site,
                                        gen_find_oms_association(RT.Subsite),
                                        gen_find_oms_association(RT.PlatformSite),
                                        subsite_id,
                                        platform_site_id)

        log.info("Associate instrument site with platform site")
        self.generic_association_script(c.OMS.assign_site_to_site,
                                        gen_find_oms_association(RT.PlatformSite),
                                        gen_find_oms_association(RT.InstrumentSite),
                                        platform_site_id,
                                        instrument_site_id)

        
        
        #----------------------------------------------
        #
        # models
        #
        #----------------------------------------------
        
        log.info("Associate platform model with platform site")
        self.generic_association_script(c.OMS.assign_platform_model_to_platform_site,
                                        platform_site_impl.find_having_model,
                                        platform_site_impl.find_stemming_model,
                                        platform_site_id,
                                        platform_model_id)

        log.info("Associate instrument model with instrument site")
        self.generic_association_script(c.OMS.assign_instrument_model_to_instrument_site,
                                        instrument_site_impl.find_having_model,
                                        instrument_site_impl.find_stemming_model,
                                        instrument_site_id,
                                        instrument_model_id)


        #----------------------------------------------
        #
        # agents
        #
        # - model required for DEVELOP
        # - egg required for INTEGRATE
        # - certification required for DEPLOY 
        #----------------------------------------------
        
        self.generic_lcs_pass(self.client.IMS, "platform_agent", platform_agent_id, LCE.PLAN, LCS.PLANNED)
        self.generic_lcs_fail(self.client.IMS, "platform_agent", platform_agent_id, LCE.DEVELOP)
        log.info("Associate platform model with platform agent")
        self.generic_association_script(c.IMS.assign_platform_model_to_platform_agent,
                                        platform_agent_impl.find_having_model,
                                        platform_agent_impl.find_stemming_model,
                                        platform_agent_id,
                                        platform_model_id)
        self.generic_lcs_pass(self.client.IMS, "platform_agent", platform_agent_id, LCE.DEVELOP, LCS.DEVELOPED)
        self.generic_lcs_fail(self.client.IMS, "platform_agent", platform_agent_id, LCE.INTEGRATE)
        add_keyworded_attachment(self.client.RR, platform_agent_id, [KeywordFlag.EGG_URL])
        self.generic_lcs_pass(self.client.IMS, "platform_agent", platform_agent_id, LCE.INTEGRATE, LCS.INTEGRATED)
        self.generic_lcs_fail(self.client.IMS, "platform_agent", platform_agent_id, LCE.DEPLOY)
        add_keyworded_attachment(self.client.RR, platform_agent_id, [KeywordFlag.CERTIFICATION, "platform attachment"])
        self.generic_lcs_pass(self.client.IMS, "platform_agent", platform_agent_id, LCE.DEPLOY, LCS.DEPLOYED)


        self.generic_lcs_pass(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.PLAN, LCS.PLANNED)
        self.generic_lcs_fail(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.DEVELOP)
        log.info("Associate instrument model with instrument agent")
        self.generic_association_script(c.IMS.assign_instrument_model_to_instrument_agent,
                                        c.IMS.find_instrument_agent_by_instrument_model,
                                        c.IMS.find_instrument_model_by_instrument_agent,
                                        instrument_agent_id,
                                        instrument_model_id)
        self.generic_lcs_pass(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.DEVELOP, LCS.DEVELOPED)
        self.generic_lcs_fail(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.INTEGRATE)
        add_keyworded_attachment(self.client.RR, instrument_agent_id, [KeywordFlag.EGG_URL])
        self.generic_lcs_pass(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.INTEGRATE, LCS.INTEGRATED)
        self.generic_lcs_fail(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.DEPLOY)
        add_keyworded_attachment(self.client.RR, instrument_agent_id, [KeywordFlag.CERTIFICATION])
        self.generic_lcs_pass(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.DEPLOY, LCS.DEPLOYED)


        #----------------------------------------------
        #
        # devices
        #
        #----------------------------------------------

        self.generic_lcs_pass(self.client.IMS, "platform_device", platform_device_id, LCE.PLAN, LCS.PLANNED)
        self.generic_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.INTEGRATE)
        log.info("Associate platform model with platform device")
        self.generic_association_script(c.IMS.assign_platform_model_to_platform_device,
                                        c.IMS.find_platform_device_by_platform_model,
                                        c.IMS.find_platform_model_by_platform_device,
                                        platform_device_id,
                                        platform_model_id)
        self.generic_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.INTEGRATE)


        log.info("Associate instrument model with instrument device")
        self.generic_association_script(c.IMS.assign_instrument_model_to_instrument_device,
                                        c.IMS.find_instrument_device_by_instrument_model,
                                        c.IMS.find_instrument_model_by_instrument_device,
                                        instrument_device_id,
                                        instrument_model_id)


        log.info("Associate instrument device with platform device")
        self.generic_association_script(c.IMS.assign_instrument_device_to_platform_device,
                                        c.IMS.find_platform_device_by_instrument_device,
                                        c.IMS.find_instrument_device_by_platform_device,
                                        platform_device_id,
                                        instrument_device_id)


        log.info("Associate sensor model with sensor device")
        self.generic_association_script(c.IMS.assign_sensor_model_to_sensor_device,
                                        sensor_device_impl.find_having_model,
                                        sensor_device_impl.find_stemming_model,
                                        sensor_device_id,
                                        sensor_model_id)



        log.info("Associate sensor device with instrument device")
        self.generic_association_script(c.IMS.assign_sensor_device_to_instrument_device,
                                        instrument_device_impl.find_having_device,
                                        instrument_device_impl.find_stemming_device,
                                        instrument_device_id,
                                        sensor_device_id)


        #----------------------------------------------
        #
        # instances
        #
        #----------------------------------------------






        #----------------------------------------------
        #
        # data production chain and swapping
        #
        #----------------------------------------------

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        ctd_stream_def = ctd_stream_definition()
        ctd_stream_def_id = self.client.PSMS.create_stream_definition(container=ctd_stream_def, name='Simulated CTD data')
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #create data products for instrument data

        log.debug('Creating new data product with stream definition: %s' % ctd_stream_def_id)

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------
        log.debug("parameter dictionary: %s" % parameter_dictionary)

        inst_data_product_id = c.DPMS.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
        log_data_product_id = c.DPMS.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)

        #assign data products appropriately
        c.DAMS.assign_data_product(input_resource_id=instrument_device_id,
                                   data_product_id=inst_data_product_id)
        c.OMS.create_site_data_product(instrument_site_id, log_data_product_id)

        deployment_id = self.generic_fcruf_script(RT.Deployment, "deployment", c.OMS, False)

        c.OMS.deploy_instrument_site(instrument_site_id, deployment_id)
        c.IMS.deploy_instrument_device(instrument_device_id, deployment_id)

        c.OMS.activate_deployment(deployment_id, True)


        #now along comes a new device
        log.info("Create instrument device 2")
        instrument_device_id2 = self.generic_fcruf_script(RT.InstrumentDevice,
                                                         "instrument_device",
                                                         self.client.IMS,
                                                         False)
        log.info("Associate instrument model with instrument device 2")
        self.generic_association_script(c.IMS.assign_instrument_model_to_instrument_device,
                                        c.IMS.find_instrument_device_by_instrument_model,
                                        c.IMS.find_instrument_model_by_instrument_device,
                                        instrument_device_id2,
                                        instrument_model_id)
        log.info("Associate instrument device with platform device 2")
        self.generic_association_script(c.IMS.assign_instrument_device_to_platform_device,
                                    c.IMS.find_platform_device_by_instrument_device,
                                    c.IMS.find_instrument_device_by_platform_device,
                                    platform_device_id,
                                    instrument_device_id2)
        inst_data_product_id2 = c.DPMS.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
        c.DAMS.assign_data_product(input_resource_id=instrument_device_id2,
                                   data_product_id=inst_data_product_id2)

        # create a new deployment for the new device
        deployment_id2 = self.generic_fcruf_script(RT.Deployment, "deployment", c.OMS, False)
        c.OMS.deploy_instrument_site(instrument_site_id, deployment_id2)
        c.IMS.deploy_instrument_device(instrument_device_id2, deployment_id2)

        # activate the new deployment -- changing the primary device -- but don't switch subscription
        c.OMS.activate_deployment(deployment_id2, False)
        #todo: assert site hasDevice instrument_device_id2

        c.OMS.transfer_site_subscription(instrument_site_id)
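        # transfer_site_subscription presumably re-points the site data product's input from
        # the first device's stream to instrument_device_id2's, completing the swap without
        # touching the site data product itself. A possible check for the todo above
        # (hypothetical -- assumes a resource-registry client on the aggregated client object):
        #   devices, _ = c.RR.find_objects(instrument_site_id, PRED.hasDevice, None, True)
        #   self.assertIn(instrument_device_id2, devices)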

        #----------------------------------------------
        #
        # generic find ops
        #
        #----------------------------------------------



        log.info("Find an instrument site by observatory")

        entities = c.OMS.find_related_frames_of_reference(observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, entities)
        inst_sites = entities[RT.InstrumentSite]
        self.assertEqual(1, len(inst_sites))
        self.assertEqual(instrument_site_id, inst_sites[0]._id)
def instrument_test_driver(container):

    org_client = OrgManagementServiceClient(node=container.node)
    id_client = IdentityManagementServiceClient(node=container.node)

    system_actor = id_client.find_actor_identity_by_name(name=CFG.system.system_actor)
    log.info("system actor:" + system_actor._id)

    sa_header_roles = get_role_message_headers(org_client.find_all_roles_by_user(system_actor._id))

    # Names of agent data streams to be configured.
    parsed_stream_name = "ctd_parsed"
    raw_stream_name = "ctd_raw"

    # Driver configuration.
    # Simulator
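    # dvr_mod/dvr_cls name the driver module and class to load; svr_addr, cmd_port and
    # evt_port locate the driver process endpoints; comms_config carries the per-channel
    # device connection settings (here the SBE37 CTD channel).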

    driver_config = {
        "svr_addr": "localhost",
        "cmd_port": 5556,
        "evt_port": 5557,
        "dvr_mod": "ion.services.mi.drivers.sbe37_driver",
        "dvr_cls": "SBE37Driver",
        "comms_config": {
            SBE37Channel.CTD: {
                "method": "ethernet",
                "device_addr": CFG.device.sbe37.host,
                "device_port": CFG.device.sbe37.port,
                "server_addr": "localhost",
                "server_port": 8888,
            }
        },
    }

    # Hardware configuration omitted here; the simulator configuration above is used.

    _container_client = ContainerAgentClient(node=container.node, name=container.name)

    # Create a pubsub client to create streams.
    _pubsub_client = PubsubManagementServiceClient(node=container.node)

    # A callback for processing subscribed-to data.
    def consume(message, headers):
        log.info("Subscriber received message: %s", str(message))

    # Create a stream subscriber registrar to create subscribers.
    subscriber_registrar = StreamSubscriberRegistrar(process=container, node=container.node)

    subs = []

    # Create streams for each stream named in driver.
    stream_config = {}
    for (stream_name, val) in PACKET_CONFIG.iteritems():
        stream_def = ctd_stream_definition(stream_id=None)
        stream_def_id = _pubsub_client.create_stream_definition(container=stream_def)
        stream_id = _pubsub_client.create_stream(
            name=stream_name,
            stream_definition_id=stream_def_id,
            original=True,
            encoding="ION R2",
            headers={"ion-actor-id": system_actor._id, "ion-actor-roles": sa_header_roles},
        )
        stream_config[stream_name] = stream_id

        # Create subscriptions for each stream.
        exchange_name = "%s_queue" % stream_name
        sub = subscriber_registrar.create_subscriber(exchange_name=exchange_name, callback=consume)
        sub.start()
        query = StreamQuery(stream_ids=[stream_id])
        sub_id = _pubsub_client.create_subscription(query=query, exchange_name=exchange_name)
        _pubsub_client.activate_subscription(sub_id)
        subs.append(sub)

    # Create agent config.

    agent_resource_id = "123xyz"

    agent_config = {
        "driver_config": driver_config,
        "stream_config": stream_config,
        "agent": {"resource_id": agent_resource_id},
    }

    # Launch an instrument agent process.
    _ia_name = "agent007"
    _ia_mod = "ion.services.mi.instrument_agent"
    _ia_class = "InstrumentAgent"
    _ia_pid = _container_client.spawn_process(name=_ia_name, module=_ia_mod, cls=_ia_class, config=agent_config)

    log.info("got pid=%s for resource_id=%s" % (str(_ia_pid), str(agent_resource_id)))
    def test_createDataProcess(self):

        #-------------------------------
        # Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create data process definition")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all',
                            process_source='some_source_reference')
        try:
            dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new data process definition: %s" %ex)


        # test Data Process Definition creation in rr
        dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.name,'ctd_L0_all')

        # Create an input instrument
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.damsclient.register_instrument(instrument_id)
        log.debug("TestIntDataProcessMgmtServiceMultiOut  data_producer_id %s" % data_producer_id)

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = ctd_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=ctd_stream_def, name='Simulated CTD data')

        self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id )
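        # Registering the CTD stream definition as the process definition's input presumably
        # lets the service validate candidate input data products against the expected
        # stream format when the data process is created.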


        #-------------------------------
        # Input Data Product
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create input data product")



        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        input_dp_obj = IonObject(   RT.DataProduct,
                                    name='InputDataProduct',
                                    description='some new dp',
                                    temporal_domain = tdom,
                                    spatial_domain = sdom)

        input_dp_id = self.dataproductclient.create_data_product(input_dp_obj, ctd_stream_def_id, parameter_dictionary)

        self.damsclient.assign_data_product(instrument_id, input_dp_id)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True)
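        # find_objects follows the DataProduct -> hasStream association; the trailing True
        # requests ids only rather than full stream objects.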

        log.debug("TestIntDataProcessMgmtServiceMultiOut: in stream_ids "   +  str(stream_ids))
        self.in_stream_id = stream_ids[0]
        log.debug("TestIntDataProcessMgmtServiceMultiOut: Input Stream: "   +  str( self.in_stream_id))

        #-------------------------------
        # Output Data Product
        #-------------------------------

        outgoing_stream_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_conductivity, name='conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id )

        outgoing_stream_pressure = L0_pressure_stream_definition()
        outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_pressure, name='pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id )

        outgoing_stream_temperature = L0_temperature_stream_definition()
        outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_temperature, name='temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id )
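        # The ctd_L0_all transform fans the single parsed CTD input out into three separate
        # L0 streams (conductivity, pressure, temperature), so each gets its own stream
        # definition bound to the process definition and, below, its own output data product.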


        self.output_products={}
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create output data product conductivity")

        output_dp_obj = IonObject(RT.DataProduct,
            name='conductivity',
            description='transform output conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id, parameter_dictionary)
        self.output_products['conductivity'] = output_dp_id_1
        self.dataproductclient.activate_data_product_persistence(data_product_id=output_dp_id_1)


        log.debug("TestIntDataProcessMgmtServiceMultiOut: create output data product pressure")

        output_dp_obj = IonObject(RT.DataProduct,
            name='pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id, parameter_dictionary)
        self.output_products['pressure'] = output_dp_id_2
        self.dataproductclient.activate_data_product_persistence(data_product_id=output_dp_id_2)

        log.debug("TestIntDataProcessMgmtServiceMultiOut: create output data product temperature")

        output_dp_obj = IonObject(RT.DataProduct,
            name='temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id, parameter_dictionary)
        self.output_products['temperature'] = output_dp_id_3
        self.dataproductclient.activate_data_product_persistence(data_product_id=output_dp_id_3)


        #-------------------------------
        # Create the data process
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create_data_process start")
        try:
            dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("TestIntDataProcessMgmtServiceMultiOut: create_data_process return")