Example #1
    def on_start(self):
        super(IngestionLauncher, self).on_start()

        exchange_point = self.CFG.get_safe('ingestion.exchange_point',
                                           'science_data')
        couch_opts = self.CFG.get_safe('ingestion.couch_storage', {})
        couch_storage = CouchStorage(**couch_opts)
        hdf_opts = self.CFG.get_safe('ingestion.hdf_storage', {})
        hdf_storage = HdfStorage(**hdf_opts)
        number_of_workers = self.CFG.get_safe('ingestion.number_of_workers', 2)

        ingestion_management_service = IngestionManagementServiceClient(
            node=self.container.node)
        ingestion_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id=exchange_point,
            couch_storage=couch_storage,
            hdf_storage=hdf_storage,
            number_of_workers=number_of_workers)
        ingestion_management_service.activate_ingestion_configuration(
            ingestion_id)

    def post_ingestion_management(self, config):
        """
        Defining the ingestion worker process is done in post_process_dispatcher.

        Creating transform workers happens here...
        """
        exchange_point = config.get_safe('ingestion.exchange_point',
                                         'science_data')
        couch_opts = config.get_safe('ingestion.couch_storage', {})
        couch_storage = CouchStorage(**couch_opts)
        hdf_opts = config.get_safe('ingestion.hdf_storage', {})
        hdf_storage = HdfStorage(**hdf_opts)
        number_of_workers = config.get_safe('ingestion.number_of_workers', 2)

        ingestion_id = self.clients.ingestion_management.create_ingestion_configuration(
            exchange_point_id=exchange_point,
            couch_storage=couch_storage,
            hdf_storage=hdf_storage,
            number_of_workers=number_of_workers)
        self.clients.ingestion_management.activate_ingestion_configuration(
            ingestion_id)
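
Both methods above read the same four keys from the container configuration via CFG.get_safe / config.get_safe. Below is a minimal sketch of the shape such an 'ingestion' section might take; the key names come straight from the code, while the concrete values and the CouchStorage/HdfStorage kwargs are illustrative assumptions borrowed from the later examples:

# Hypothetical configuration consumed by get_safe('ingestion.*') above.
ingestion = {
    'exchange_point': 'science_data',                    # ingestion.exchange_point
    'couch_storage': {'datastore_name': 'science_store',
                      'datastore_profile': 'SCIDATA'},   # kwargs for CouchStorage(**couch_opts)
    'hdf_storage': {'relative_path': 'ingest'},          # kwargs for HdfStorage(**hdf_opts)
    'number_of_workers': 2,                              # ingestion.number_of_workers
}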
Example #3
    def test_dm_integration(self):
        '''
        test_dm_integration
        Test full DM Services Integration using a salinity transform
        '''
        cc = self.container
        assertions = self.assertTrue

        #-----------------------------
        # Copy below here to run as a script (don't forget the imports of course!)
        #-----------------------------

        # Create some service clients...
        pubsub_management_service = PubsubManagementServiceClient(node=cc.node)
        ingestion_management_service = IngestionManagementServiceClient(
            node=cc.node)
        dataset_management_service = DatasetManagementServiceClient(
            node=cc.node)
        data_retriever_service = DataRetrieverServiceClient(node=cc.node)
        transform_management_service = TransformManagementServiceClient(
            node=cc.node)
        process_dispatcher = ProcessDispatcherServiceClient(node=cc.node)

        # declare some handy variables

        datastore_name = 'test_dm_integration'

        ###
        ### In the beginning there were two stream definitions...
        ###
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = pubsub_management_service.create_stream_definition(
            container=ctd_stream_def, name='Simulated CTD data')

        # create a stream definition for the data from the salinity Transform
        sal_stream_def_id = pubsub_management_service.create_stream_definition(
            container=SalinityTransform.outgoing_stream_def,
            name='Scalar Salinity data stream')

        ###
        ### And two process definitions...
        ###
        # one for the ctd simulator...
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module': 'ion.processes.data.ctd_stream_publisher',
            'class': 'SimpleCtdPublisher'
        }

        ctd_sim_procdef_id = process_dispatcher.create_process_definition(
            process_definition=producer_definition)

        # one for the salinity transform
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module': 'ion.processes.data.transforms.ctd.ctd_L2_salinity',
            'class': 'SalinityTransform'
        }

        salinity_transform_procdef_id = process_dispatcher.create_process_definition(
            process_definition=producer_definition)

        #---------------------------
        # Set up ingestion - this is an operator concern - not done by SA in a deployed system
        #---------------------------
        # Configure ingestion using one worker, ingesting to the test_dm_integration datastore with the SCIDATA profile
        log.debug('Calling create_ingestion_configuration')
        ingestion_configuration_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id='science_data',
            couch_storage=CouchStorage(datastore_name=datastore_name,
                                       datastore_profile='SCIDATA'),
            number_of_workers=1)
        #
        ingestion_management_service.activate_ingestion_configuration(
            ingestion_configuration_id=ingestion_configuration_id)

        #---------------------------
        # Set up the producer (CTD Simulator)
        #---------------------------

        # Create the stream
        ctd_stream_id = pubsub_management_service.create_stream(
            stream_definition_id=ctd_stream_def_id)

        # Set up the datasets
        ctd_dataset_id = dataset_management_service.create_dataset(
            stream_id=ctd_stream_id,
            datastore_name=datastore_name,
            view_name='datasets/stream_join_granule')

        # Configure ingestion of this dataset
        ctd_dataset_config_id = ingestion_management_service.create_dataset_configuration(
            dataset_id=ctd_dataset_id,
            archive_data=True,
            archive_metadata=True,
            ingestion_configuration_id=ingestion_configuration_id,  # you need to know the ingestion configuration id!
        )
        # Hold onto ctd_dataset_config_id if you want to stop/start ingestion of that dataset by the ingestion service

        #---------------------------
        # Set up the salinity transform
        #---------------------------

        # Create the stream
        sal_stream_id = pubsub_management_service.create_stream(
            stream_definition_id=sal_stream_def_id)

        # Set up the datasets
        sal_dataset_id = dataset_management_service.create_dataset(
            stream_id=sal_stream_id,
            datastore_name=datastore_name,
            view_name='datasets/stream_join_granule')

        # Configure ingestion of the salinity as a dataset
        sal_dataset_config_id = ingestion_management_service.create_dataset_configuration(
            dataset_id=sal_dataset_id,
            archive_data=True,
            archive_metadata=True,
            ingestion_configuration_id=ingestion_configuration_id,  # you need to know the ingestion configuration id!
        )
        # Hold onto sal_dataset_config_id if you want to stop/start ingestion of that dataset by the ingestion service

        # Create a subscription as input to the transform
        sal_transform_input_subscription_id = pubsub_management_service.create_subscription(
            query=StreamQuery(stream_ids=[
                ctd_stream_id,
            ]),
            exchange_name='salinity_transform_input'
        )  # how do we make these names??? i.e. Should they be anonymous?

        # create the salinity transform
        sal_transform_id = transform_management_service.create_transform(
            name='example salinity transform',
            in_subscription_id=sal_transform_input_subscription_id,
            out_streams={
                'output': sal_stream_id,
            },
            process_definition_id=salinity_transform_procdef_id,
            # no configuration needed at this time...
        )
        # start the transform - for a test case it makes sense to do it before starting the producer but it is not required
        transform_management_service.activate_transform(
            transform_id=sal_transform_id)

        # Start the ctd simulator to produce some data
        configuration = {
            'process': {
                'stream_id': ctd_stream_id,
            }
        }
        ctd_sim_pid = process_dispatcher.schedule_process(
            process_definition_id=ctd_sim_procdef_id,
            configuration=configuration)

        ###
        ### Make a subscriber in the test to listen for salinity data
        ###
        salinity_subscription_id = pubsub_management_service.create_subscription(
            query=StreamQuery([
                sal_stream_id,
            ]),
            exchange_name='salinity_test',
            name="test salinity subscription",
        )

        pid = cc.spawn_process(name='dummy_process_for_test',
                               module='pyon.ion.process',
                               cls='SimpleProcess',
                               config={})
        dummy_process = cc.proc_manager.procs[pid]

        subscriber_registrar = StreamSubscriberRegistrar(process=dummy_process,
                                                         node=cc.node)

        result = gevent.event.AsyncResult()
        results = []

        def message_received(message, headers):
            # Count received messages and signal the test once enough have arrived
            log.warn('Salinity data received!')
            results.append(message)
            if len(results) > 3:
                result.set(True)

        subscriber = subscriber_registrar.create_subscriber(
            exchange_name='salinity_test', callback=message_received)
        subscriber.start()

        # after the queue has been created it is safe to activate the subscription
        pubsub_management_service.activate_subscription(
            subscription_id=salinity_subscription_id)

        # Assert that we have received data
        assertions(result.get(timeout=10))

        # stop the flow and parse the messages...
        process_dispatcher.cancel_process(
            ctd_sim_pid
        )  # kill the ctd simulator process - that is enough data

        for message in results:

            psd = PointSupplementStreamParser(
                stream_definition=SalinityTransform.outgoing_stream_def,
                stream_granule=message)

            # Test the handy info method for the names of fields in the stream def
            assertions('salinity' in psd.list_field_names())

            # you have to know the name of the coverage in stream def
            salinity = psd.get_values('salinity')

            import numpy

            assertions(isinstance(salinity, numpy.ndarray))

            assertions(numpy.nanmin(salinity) >
                       0.0)  # salinity should always be greater than 0
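
The subscriber pattern used in the test above (accumulate messages in a callback, then unblock the waiting test once enough have arrived) does not depend on the ion services. Here is a standalone sketch of just that synchronization with plain gevent; the fake_publisher greenlet is an illustrative stand-in for the real stream:

import gevent
import gevent.event

result = gevent.event.AsyncResult()
results = []

def message_received(message, headers=None):
    # Accumulate messages; release the waiter once more than three have arrived.
    results.append(message)
    if len(results) > 3:
        result.set(True)

def fake_publisher():
    # Stand-in for the CTD/salinity stream delivering granules.
    for i in range(5):
        message_received({'seq': i})
        gevent.sleep(0.01)

gevent.spawn(fake_publisher)
assert result.get(timeout=10)  # blocks until result.set(True) or the timeout expires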
Example #4
    def test_raw_stream_integration(self):
        cc = self.container
        assertions = self.assertTrue

        #-----------------------------
        # Copy below here to run as a script (don't forget the imports of course!)
        #-----------------------------

        # Create some service clients...
        pubsub_management_service = PubsubManagementServiceClient(node=cc.node)
        ingestion_management_service = IngestionManagementServiceClient(
            node=cc.node)
        dataset_management_service = DatasetManagementServiceClient(
            node=cc.node)
        process_dispatcher = ProcessDispatcherServiceClient(node=cc.node)

        # declare some handy variables

        datastore_name = 'test_dm_integration'

        ###
        ### In the beginning there was one stream definition...
        ###
        # create a stream definition for the data from the ctd simulator
        raw_ctd_stream_def = SBE37_RAW_stream_definition()
        raw_ctd_stream_def_id = pubsub_management_service.create_stream_definition(
            container=raw_ctd_stream_def, name='Simulated RAW CTD data')

        ###
        ### And one process definition...
        ###
        # one for the ctd simulator...
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module': 'ion.processes.data.raw_stream_publisher',
            'class': 'RawStreamPublisher'
        }

        raw_ctd_sim_procdef_id = process_dispatcher.create_process_definition(
            process_definition=producer_definition)

        #---------------------------
        # Set up ingestion - this is an operator concern - not done by SA in a deployed system
        #---------------------------
        # Configure ingestion using one worker, ingesting to the test_dm_integration datastore with the SCIDATA profile
        log.debug('Calling create_ingestion_configuration')
        ingestion_configuration_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id='science_data',
            couch_storage=CouchStorage(datastore_name=datastore_name,
                                       datastore_profile='SCIDATA'),
            number_of_workers=1)
        #
        ingestion_management_service.activate_ingestion_configuration(
            ingestion_configuration_id=ingestion_configuration_id)

        #---------------------------
        # Set up the producer (CTD Simulator)
        #---------------------------

        # Create the stream
        raw_ctd_stream_id = pubsub_management_service.create_stream(
            stream_definition_id=raw_ctd_stream_def_id)

        # Set up the datasets
        raw_ctd_dataset_id = dataset_management_service.create_dataset(
            stream_id=raw_ctd_stream_id,
            datastore_name=datastore_name,
            view_name='datasets/stream_join_granule')

        # Configure ingestion of this dataset
        raw_ctd_dataset_config_id = ingestion_management_service.create_dataset_configuration(
            dataset_id=raw_ctd_dataset_id,
            archive_data=True,
            archive_metadata=True,
            ingestion_configuration_id=ingestion_configuration_id,  # you need to know the ingestion configuration id!
        )
        # Hold onto raw_ctd_dataset_config_id if you want to stop/start ingestion of that dataset by the ingestion service

        # Start the ctd simulator to produce some data
        configuration = {
            'process': {
                'stream_id': raw_ctd_stream_id,
            }
        }
        raw_sim_pid = process_dispatcher.schedule_process(
            process_definition_id=raw_ctd_sim_procdef_id,
            configuration=configuration)

        ###
        ### Make a subscriber in the test to listen for raw data
        ###
        raw_subscription_id = pubsub_management_service.create_subscription(
            query=StreamQuery([
                raw_ctd_stream_id,
            ]),
            exchange_name='raw_test',
            name="test raw subscription",
        )

        # this is okay - even in cei mode!
        pid = cc.spawn_process(name='dummy_process_for_test',
                               module='pyon.ion.process',
                               cls='SimpleProcess',
                               config={})
        dummy_process = cc.proc_manager.procs[pid]

        subscriber_registrar = StreamSubscriberRegistrar(process=dummy_process,
                                                         node=cc.node)

        result = gevent.event.AsyncResult()
        results = []

        def message_received(message, headers):
            # Count received messages and signal the test once enough have arrived
            log.warn('Raw data received!')
            results.append(message)
            if len(results) > 3:
                result.set(True)

        subscriber = subscriber_registrar.create_subscriber(
            exchange_name='raw_test', callback=message_received)
        subscriber.start()

        # after the queue has been created it is safe to activate the subscription
        pubsub_management_service.activate_subscription(
            subscription_id=raw_subscription_id)

        # Assert that we have received data
        assertions(result.get(timeout=10))

        # stop the flow and parse the messages...
        process_dispatcher.cancel_process(
            raw_sim_pid
        )  # kill the ctd simulator process - that is enough data

        gevent.sleep(1)

        for message in results:

            sha1 = message.identifiables['stream_encoding'].sha1

            data = message.identifiables['data_stream'].values

            filename = FileSystem.get_hierarchical_url(FS.CACHE, sha1, ".raw")

            with open(filename, 'r') as f:

                assertions(data == f.read())
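
The raw-data check above assumes each granule payload is cached in a file named after its SHA-1 digest. Here is a standalone sketch of that verification using only the standard library; the flat cache layout and the helper name are illustrative stand-ins for FileSystem.get_hierarchical_url(FS.CACHE, sha1, ".raw"):

import hashlib
import os

def verify_cached_payload(payload, cache_dir):
    # Mirror message.identifiables['stream_encoding'].sha1: upper-case hex SHA-1.
    sha1 = hashlib.sha1(payload).hexdigest().upper()
    filename = os.path.join(cache_dir, sha1 + '.raw')  # flat layout, for illustration only
    with open(filename, 'rb') as f:
        return f.read() == payload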
    def test_replay_integration(self):
        '''
        test_replay_integration
        '''
        import numpy as np
        # Keep this import; it's used in the vector comparison below even though PyCharm says it's unused.

        cc = self.container
        XP = self.XP
        assertions = self.assertTrue

        ### Every thing below here can be run as a script:
        log.debug('Got it')

        pubsub_management_service = PubsubManagementServiceClient(node=cc.node)
        ingestion_management_service = IngestionManagementServiceClient(
            node=cc.node)
        dataset_management_service = DatasetManagementServiceClient(
            node=cc.node)
        data_retriever_service = DataRetrieverServiceClient(node=cc.node)

        datastore_name = 'dm_test_replay_integration'

        producer = Publisher(name=(XP, 'stream producer'))

        ingestion_configuration_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id=XP,
            couch_storage=CouchStorage(datastore_name=datastore_name,
                                       datastore_profile='SCIDATA'),
            hdf_storage=HdfStorage(),
            number_of_workers=1)

        ingestion_management_service.activate_ingestion_configuration(
            ingestion_configuration_id=ingestion_configuration_id)

        definition = SBE37_CDM_stream_definition()
        data_stream_id = definition.data_stream_id
        encoding_id = definition.identifiables[data_stream_id].encoding_id
        element_count_id = definition.identifiables[
            data_stream_id].element_count_id

        stream_def_id = pubsub_management_service.create_stream_definition(
            container=definition)
        stream_id = pubsub_management_service.create_stream(
            stream_definition_id=stream_def_id)

        dataset_id = dataset_management_service.create_dataset(
            stream_id=stream_id,
            datastore_name=datastore_name,
            view_name='datasets/dataset_by_id')
        ingestion_management_service.create_dataset_configuration(
            dataset_id=dataset_id,
            archive_data=True,
            archive_metadata=True,
            ingestion_configuration_id=ingestion_configuration_id)
        definition.stream_resource_id = stream_id

        packet = _create_packet(definition)
        input_file = FileSystem.mktemp()
        input_file.write(packet.identifiables[data_stream_id].values)
        input_file_path = input_file.name
        input_file.close()

        fields = [
            'conductivity', 'height', 'latitude', 'longitude', 'pressure',
            'temperature', 'time'
        ]

        input_vectors = acquire_data([input_file_path], fields, 2).next()

        producer.publish(msg=packet, to_name=(XP, '%s.data' % stream_id))

        replay_id, replay_stream_id = data_retriever_service.define_replay(
            dataset_id)
        ar = gevent.event.AsyncResult()

        def sub_listen(msg, headers):

            assertions(isinstance(msg, StreamGranuleContainer),
                       'replayed message is not a granule.')
            hdf_string = msg.identifiables[data_stream_id].values
            sha1 = hashlib.sha1(hdf_string).hexdigest().upper()
            assertions(sha1 == msg.identifiables[encoding_id].sha1,
                       'Checksum failed.')
            assertions(
                msg.identifiables[element_count_id].value == 1,
                'record replay count is incorrect %d.' %
                msg.identifiables[element_count_id].value)
            output_file = FileSystem.mktemp()
            output_file.write(msg.identifiables[data_stream_id].values)
            output_file_path = output_file.name
            output_file.close()
            output_vectors = acquire_data([output_file_path], fields, 2).next()
            for field in fields:
                comparison = (input_vectors[field]['values'] ==
                              output_vectors[field]['values'])
                assertions(
                    comparison.all(), 'vector mismatch: %s vs %s' %
                    (input_vectors[field]['values'],
                     output_vectors[field]['values']))
            FileSystem.unlink(output_file_path)
            ar.set(True)

        subscriber = Subscriber(name=(XP, 'replay listener'),
                                callback=sub_listen)

        g = gevent.Greenlet(subscriber.listen,
                            binding='%s.data' % replay_stream_id)
        g.start()

        data_retriever_service.start_replay(replay_id)

        ar.get(timeout=10)

        FileSystem.unlink(input_file_path)
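
The per-field comparison inside sub_listen boils down to an element-wise equality check between two numpy vectors. A minimal, self-contained version of that check follows; the field name and values are made up for illustration:

import numpy as np

input_vectors = {'temperature': {'values': np.array([10.1, 10.2, 10.3])}}
output_vectors = {'temperature': {'values': np.array([10.1, 10.2, 10.3])}}

for field in input_vectors:
    comparison = input_vectors[field]['values'] == output_vectors[field]['values']
    # comparison is a boolean array; .all() is True only if every element matched
    assert comparison.all(), 'vector mismatch: %s' % field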
    def test_usgs_integration(self):
        '''
        test_usgs_integration
        Test full DM Services Integration using USGS data
        '''
        cc = self.container
        assertions = self.assertTrue

        #-----------------------------
        # Copy below here
        #-----------------------------
        pubsub_management_service = PubsubManagementServiceClient(node=cc.node)
        ingestion_management_service = IngestionManagementServiceClient(node=cc.node)
        dataset_management_service = DatasetManagementServiceClient(node=cc.node)
        data_retriever_service = DataRetrieverServiceClient(node=cc.node)
        transform_management_service = TransformManagementServiceClient(node=cc.node)
        process_dispatcher = ProcessDispatcherServiceClient(node=cc.node)

        process_list = []
        datasets = []

        datastore_name = 'test_usgs_integration'


        #---------------------------
        # Set up ingestion
        #---------------------------
        # Configure ingestion using eight workers, ingesting to the test_usgs_integration datastore with the SCIDATA profile
        log.debug('Calling create_ingestion_configuration')
        ingestion_configuration_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id='science_data',
            couch_storage=CouchStorage(datastore_name=datastore_name,datastore_profile='SCIDATA'),
            number_of_workers=8
        )
        #
        ingestion_management_service.activate_ingestion_configuration(
            ingestion_configuration_id=ingestion_configuration_id)

        usgs_stream_def = USGS_stream_definition()

        stream_def_id = pubsub_management_service.create_stream_definition(container=usgs_stream_def, name='Junk definition')


        #---------------------------
        # Set up the producers (USGS publishers)
        #---------------------------
        # Launch two simulated USGS producers
        for iteration in xrange(2):
            # Make a stream to output on

            stream_id = pubsub_management_service.create_stream(stream_definition_id=stream_def_id)

            #---------------------------
            # Set up the datasets
            #---------------------------
            dataset_id = dataset_management_service.create_dataset(
                stream_id=stream_id,
                datastore_name=datastore_name,
                view_name='datasets/stream_join_granule'
            )
            # Keep track of the datasets
            datasets.append(dataset_id)

            stream_policy_id = ingestion_management_service.create_dataset_configuration(
                dataset_id = dataset_id,
                archive_data = True,
                archive_metadata = True,
                ingestion_configuration_id = ingestion_configuration_id
            )


            producer_definition = ProcessDefinition()
            producer_definition.executable = {
                'module':'eoi.agent.handler.usgs_stream_publisher',
                'class':'UsgsPublisher'
            }
            configuration = {
                'process':{
                    'stream_id':stream_id,
                    }
            }
            procdef_id = process_dispatcher.create_process_definition(process_definition=producer_definition)
            log.debug('procdef_id: %s', procdef_id)
            pid = process_dispatcher.schedule_process(process_definition_id=procdef_id, configuration=configuration)


            # Keep track, we'll kill 'em later.
            process_list.append(pid)
        # Get about 4 seconds of data
        time.sleep(4)

        #---------------------------
        # Stop producing data
        #---------------------------

        for process in process_list:
            process_dispatcher.cancel_process(process)

        #----------------------------------------------
        # The replay and the transform, a love story.
        #----------------------------------------------
        # Happy Valentines to the clever coder who catches the above!

        transform_definition = ProcessDefinition()
        transform_definition.executable = {
            'module':'ion.processes.data.transforms.transform_example',
            'class':'TransformCapture'
        }
        transform_definition_id = process_dispatcher.create_process_definition(process_definition=transform_definition)

        dataset_id = datasets.pop() # Just need one for now
        replay_id, stream_id = data_retriever_service.define_replay(dataset_id=dataset_id)

        #--------------------------------------------
        # I'm Selling magazine subscriptions here!
        #--------------------------------------------

        subscription = pubsub_management_service.create_subscription(query=StreamQuery(stream_ids=[stream_id]),
            exchange_name='transform_capture_point')

        #--------------------------------------------
        # Start the transform (capture)
        #--------------------------------------------
        transform_id = transform_management_service.create_transform(
            name='capture_transform',
            in_subscription_id=subscription,
            process_definition_id=transform_definition_id
        )

        transform_management_service.activate_transform(transform_id=transform_id)

        #--------------------------------------------
        # BEGIN REPLAY!
        #--------------------------------------------

        data_retriever_service.start_replay(replay_id=replay_id)

        #--------------------------------------------
        # Lets get some boundaries
        #--------------------------------------------

        bounds = dataset_management_service.get_dataset_bounds(dataset_id=dataset_id)
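
Every example above repeats the same ingestion setup: create and activate an ingestion configuration, create a stream and its backing dataset, then register that dataset with the ingestion service. A condensed sketch of that sequence follows, reusing the client calls exactly as they appear in the tests; the helper name and parameter list are illustrative, and CouchStorage is imported as in the examples:

def setup_stream_ingestion(pubsub_client, dataset_client, ingestion_client,
                           stream_def_id, datastore_name, workers=1):
    # 1. Create and activate an ingestion configuration on the exchange point.
    ingestion_configuration_id = ingestion_client.create_ingestion_configuration(
        exchange_point_id='science_data',
        couch_storage=CouchStorage(datastore_name=datastore_name,
                                   datastore_profile='SCIDATA'),
        number_of_workers=workers)
    ingestion_client.activate_ingestion_configuration(
        ingestion_configuration_id=ingestion_configuration_id)

    # 2. Create the stream and the dataset that will hold its granules.
    stream_id = pubsub_client.create_stream(stream_definition_id=stream_def_id)
    dataset_id = dataset_client.create_dataset(
        stream_id=stream_id,
        datastore_name=datastore_name,
        view_name='datasets/stream_join_granule')

    # 3. Tell the ingestion service to archive data and metadata for the dataset.
    ingestion_client.create_dataset_configuration(
        dataset_id=dataset_id,
        archive_data=True,
        archive_metadata=True,
        ingestion_configuration_id=ingestion_configuration_id)

    return stream_id, dataset_id, ingestion_configuration_id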
Example #7
    def test_createTransformsThenActivateInstrument(self):

        # Set up the preconditions
        # ingestion configuration parameters
        self.exchange_point_id = 'science_data'
        self.number_of_workers = 2
        self.hdf_storage = HdfStorage(relative_path='ingest')
        self.couch_storage = CouchStorage(datastore_name='test_datastore')
        self.XP = 'science_data'
        self.exchange_name = 'ingestion_queue'

        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel",
                                  model_label="SBE37IMModel")
        try:
            instModel_id = self.imsclient.create_instrument_model(
                instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" % ex)
        print 'test_createTransformsThenActivateInstrument: new InstrumentModel id = ', instModel_id

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_module="ion.agents.instrument.instrument_agent",
            driver_class="InstrumentAgent")
        try:
            instAgent_id = self.imsclient.create_instrument_agent(
                instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" % ex)
        print 'test_createTransformsThenActivateInstrument: new InstrumentAgent id = ', instAgent_id

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        try:
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)

        print 'test_createTransformsThenActivateInstrument: new InstrumentDevice id = ', instDevice_id

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------
        #        driver_config = {
        #            'svr_addr': "localhost",
        #            'cmd_port': 5556,
        #            'evt_port': 5557,
        #            'dvr_mod': "ion.agents.instrument.drivers.sbe37.sbe37_driver",
        #            'dvr_cls': "SBE37Driver",
        #            'comms_config': {
        #                    'addr': 'sbe37-simulator.oceanobservatories.org',
        #                    'port': 4001,
        #                }
        #            }
        driver_config = {
            'dvr_mod': 'ion.agents.instrument.drivers.sbe37.sbe37_driver',
            'dvr_cls': 'SBE37Driver',
            'workdir': '/tmp/',
        }

        instAgentInstance_obj = IonObject(
            RT.InstrumentAgentInstance,
            name='SBE37IMAgentInstance',
            description="SBE37IMAgentInstance",
            driver_config=driver_config,
            comms_device_address='sbe37-simulator.oceanobservatories.org',
            comms_device_port=4001,
            port_agent_work_dir='/tmp/',
            port_agent_delimeter=['<<', '>>'])
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(
            container=ctd_stream_def)

        print 'test_createTransformsThenActivateInstrument: new Stream Definition id = ', ctd_stream_def_id

        print 'Creating new CDM data product with a stream definition'
        dp_obj = IonObject(RT.DataProduct,
                           name='ctd_parsed',
                           description='ctd stream test')
        try:
            ctd_parsed_data_product = self.dataproductclient.create_data_product(
                dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" % ex)
        print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_parsed_data_product)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_parsed_data_product,
            persist_data=True,
            persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product,
                                                   PRED.hasStream, None, True)
        print 'test_createTransformsThenActivateInstrument: Data product streams1 = ', stream_ids

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        print 'test_createTransformsThenActivateInstrument: Creating new RAW data product with a stream definition'
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubclient.create_stream_definition(
            container=raw_stream_def)

        dp_obj = IonObject(RT.DataProduct,
                           name='ctd_raw',
                           description='raw stream test')
        try:
            ctd_raw_data_product = self.dataproductclient.create_data_product(
                dp_obj, raw_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" % ex)
        print 'new ctd_raw_data_product_id = ', ctd_raw_data_product

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_raw_data_product)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_raw_data_product,
            persist_data=True,
            persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product,
                                                   PRED.hasStream, None, True)
        print 'Data product streams2 = ', stream_ids

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition ctd_L0_all"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L0_all',
            description='transform ctd package into three separate L0 streams',
            module='ion.processes.data.transforms.ctd.ctd_L0_all',
            class_name='ctd_L0_all',
            process_source='some_source_reference')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new ctd_L0_all data process definition: %s" %
                ex)

        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1ConductivityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_conductivity',
            description='create the L1 conductivity data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
            class_name='CTDL1ConductivityTransform',
            process_source='CTDL1ConductivityTransform source code here...')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1ConductivityTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1PressureTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_pressure',
            description='create the L1 pressure data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
            class_name='CTDL1PressureTransform',
            process_source='CTDL1PressureTransform source code here...')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1PressureTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1TemperatureTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_temperature',
            description='create the L1 temperature data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
            class_name='CTDL1TemperatureTransform',
            process_source='CTDL1TemperatureTransform source code here...')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1TemperatureTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition SalinityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L2_salinity',
            description='create the L2 salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform',
            process_source='SalinityTransform source code here...')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new SalinityTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition DensityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L2_density',
            description='create the L2 density data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_density',
            class_name='DensityTransform',
            process_source='DensityTransform source code here...')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new DensityTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_conductivity, name='L0_Conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id)

        outgoing_stream_l0_pressure = L0_pressure_stream_definition()
        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_pressure, name='L0_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id)

        outgoing_stream_l0_temperature = L0_temperature_stream_definition()
        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_temperature, name='L0_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id)

        self.output_products = {}
        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L0 conductivity"
        )
        ctd_l0_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Conductivity',
            description='transform output conductivity')
        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_conductivity_output_dp_obj,
            outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l0_conductivity_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L0 pressure"
        )
        ctd_l0_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure')
        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l0_pressure_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L0 temperature"
        )
        ctd_l0_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature')
        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_temperature_output_dp_obj,
            outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l0_temperature_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l1_conductivity = L1_conductivity_stream_definition()
        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_conductivity, name='L1_conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_conductivity_id,
            ctd_L1_conductivity_dprocdef_id)

        outgoing_stream_l1_pressure = L1_pressure_stream_definition()
        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_pressure, name='L1_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id)

        outgoing_stream_l1_temperature = L1_temperature_stream_definition()
        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_temperature, name='L1_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L1 conductivity"
        )
        ctd_l1_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity')
        ctd_l1_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_conductivity_output_dp_obj,
            outgoing_stream_l1_conductivity_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l1_conductivity_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L1 pressure"
        )
        ctd_l1_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Pressure',
            description='transform output L1 pressure')
        ctd_l1_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_pressure_output_dp_obj, outgoing_stream_l1_pressure_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l1_pressure_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L1 temperature"
        )
        ctd_l1_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Temperature',
            description='transform output L1 temperature')
        ctd_l1_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_temperature_output_dp_obj,
            outgoing_stream_l1_temperature_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l1_temperature_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        outgoing_stream_l2_salinity = L2_practical_salinity_stream_definition()
        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l2_salinity, name='L2_salinity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id)

        outgoing_stream_l2_density = L2_density_stream_definition()
        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l2_density, name='L2_Density')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L2 Salinity"
        )
        ctd_l2_salinity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L2_Salinity',
            description='transform output L2 salinity')
        ctd_l2_salinity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l2_salinity_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L2 Density"
        )
        ctd_l2_density_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L2_Density',
            description='transform output L2 density')
        ctd_l2_density_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l2_density_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L0 all data_process start"
        )
        try:
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L0_all_dprocdef_id, [ctd_parsed_data_product],
                self.output_products)
            self.dataprocessclient.activate_data_process(
                ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L0 all data_process return"
        )

        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L1 Conductivity data_process start"
        )
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L1_conductivity_dprocdef_id,
                [ctd_l0_conductivity_output_dp_id],
                {'output': ctd_l1_conductivity_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L1 Conductivity data_process return"
        )

        #-------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L1_Pressure data_process start"
        )
        try:
            l1_pressure_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L1_pressure_dprocdef_id, [ctd_l0_pressure_output_dp_id],
                {'output': ctd_l1_pressure_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l1_pressure_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L1_Pressure data_process return"
        )

        #-------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L1_Pressure data_process start"
        )
        try:
            l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L1_temperature_dprocdef_id,
                [ctd_l0_temperature_output_dp_id],
                {'output': ctd_l1_temperature_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l1_temperature_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L1_Pressure data_process return"
        )

        #-------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L2_salinity data_process start"
        )
        try:
            l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L2_salinity_dprocdef_id, [ctd_parsed_data_product],
                {'output': ctd_l2_salinity_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l2_salinity_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L2_salinity data_process return"
        )

        #-------------------------------
        # L2 Density: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L2_Density data_process start"
        )
        try:
            l2_density_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L2_density_dprocdef_id, [ctd_parsed_data_product],
                {'output': ctd_l2_density_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l2_density_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L2_Density data_process return"
        )

        #-------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        print 'test_createTransformsThenActivateInstrument: Instrument agent instance obj: = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(
            'iaclient',
            name=inst_agent_instance_obj.agent_process_id,
            process=FakeProcess())
        print 'activate_instrument: got ia client ', self._ia_client
        log.debug(
            " test_createTransformsThenActivateInstrument:: got ia client %s",
            str(self._ia_client))

        #-------------------------------
        # Streaming
        #-------------------------------

        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        print retval
        log.debug(
            "test_createTransformsThenActivateInstrument:: initialize %s",
            str(retval))

        time.sleep(1)

        cmd = AgentCommand(command='go_active')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_createTransformsThenActivateInstrument:: go_active %s",
                  str(reply))
        time.sleep(1)

        cmd = AgentCommand(command='run')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_createTransformsThenActivateInstrument:: run %s",
                  str(reply))
        time.sleep(1)

        # Make sure the sampling rate and transmission are sane.
        params = {
            SBE37Parameter.NAVG: 1,
            SBE37Parameter.INTERVAL: 5,
            SBE37Parameter.TXREALTIME: True
        }
        self._ia_client.set_param(params)

        self._no_samples = 2

        # Begin streaming.
        cmd = AgentCommand(command='go_streaming')
        reply = self._ia_client.execute_agent(cmd)
        log.debug(
            "test_createTransformsThenActivateInstrument:: go_streaming %s",
            str(reply))
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)

        time.sleep(15)

        log.debug("test_activateInstrument: calling go_observatory")
        cmd = AgentCommand(command='go_observatory')
        reply = self._ia_client.execute(cmd)
        log.debug("test_activateInstrument: return from go_observatory   %s",
                  str(reply))
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        time.sleep(2)

        log.debug(
            "test_createTransformsThenActivateInstrument:: calling reset ")
        cmd = AgentCommand(command='reset')
        reply = self._ia_client.execute_agent(cmd)
        log.debug(
            "test_createTransformsThenActivateInstrument:: return from reset %s",
            str(reply))
        time.sleep(2)

        #-------------------------------
        # Sampling
        #-------------------------------
        #        cmd = AgentCommand(command='initialize')
        #        retval = self._ia_client.execute_agent(cmd)
        #        print retval
        #        log.debug("test_createTransformsThenActivateInstrument:: initialize %s", str(retval))
        #        time.sleep(2)
        #
        #        cmd = AgentCommand(command='go_active')
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activateInstrument: go_active %s", str(reply))
        #        time.sleep(2)
        #
        #        cmd = AgentCommand(command='run')
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activateInstrument: run %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling acquire_sample ")
        #        cmd = AgentCommand(command='acquire_sample')
        #        reply = self._ia_client.execute(cmd)
        #        log.debug("test_activateInstrument: return from acquire_sample %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling acquire_sample 2")
        #        cmd = AgentCommand(command='acquire_sample')
        #        reply = self._ia_client.execute(cmd)
        #        log.debug("test_activateInstrument: return from acquire_sample 2   %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling acquire_sample 3")
        #        cmd = AgentCommand(command='acquire_sample')
        #        reply = self._ia_client.execute(cmd)
        #        log.debug("test_activateInstrument: return from acquire_sample 3   %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling go_inactive ")
        #        cmd = AgentCommand(command='go_inactive')
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activateInstrument: return from go_inactive %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling reset ")
        #        cmd = AgentCommand(command='reset')
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activateInstrument: return from reset %s", str(reply))
        #        time.sleep(2)
        #
        #
        #        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)
        #
        #
        #        #get the dataset id of the ctd_parsed product from the dataproduct  ctd_parsed_data_product
        #        ctd_parsed_data_product_obj = self.dataproductclient.read_data_product(ctd_parsed_data_product)
        #        log.debug("test_createTransformsThenActivateInstrument:: ctd_parsed_data_product dataset id %s", str(ctd_parsed_data_product_obj.dataset_id))
        #
        #        # ask for the dataset bounds from the datasetmgmtsvc
        #        bounds = self.datasetclient.get_dataset_bounds(ctd_parsed_data_product_obj.dataset_id)
        #        log.debug("test_createTransformsThenActivateInstrument:: ctd_parsed_data_product dataset bounds %s", str(bounds))
        #        print 'activate_instrument: got dataset bounds %s', str(bounds)

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)
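
The streaming section above drives the instrument agent through a fixed command sequence (initialize, go_active, run, go_streaming, then back through go_observatory and reset). A compact sketch of that sequence as a loop follows, using AgentCommand and execute_agent exactly as they appear in the test; the helper and its default command list are illustrative:

def drive_agent(ia_client, commands=('initialize', 'go_active', 'run',
                                     'go_streaming')):
    # Issue each agent command in order and log the reply, mirroring the
    # inline sequence in test_createTransformsThenActivateInstrument.
    for name in commands:
        cmd = AgentCommand(command=name)
        reply = ia_client.execute_agent(cmd)
        log.debug("drive_agent: %s -> %s", name, str(reply))
        time.sleep(1)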