class TestIntDataAcquisitionManagementService(IonIntegrationTestCase):
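    """
    Integration tests for the CRUD, registration, and assignment operations
    of the Data Acquisition Management Service (DAMS).
    """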

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)

    def tearDown(self):
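        # Nothing to do here: IonIntegrationTestCase registers container
        # cleanup when _start_container() is called, so no explicit
        # teardown should be needed.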
        pass


    #@unittest.skip('Not done yet.')
    def test_data_source_ops(self):
        # test creating a new data source
        print 'Creating new data source'
        datasource_obj = IonObject(RT.DataSource,
                           name='DataSource1',
                           description='instrument based new source',
                           type='sbe37')
        try:
            ds_id = self.client.create_data_source(datasource_obj)
        except BadRequest as ex:
            self.fail("failed to create new data source: %s" %ex)
        print 'new data source id = ', ds_id


        # test reading a non-existent data source
        print 'reading non-existent data source'
        try:
            dp_obj = self.client.read_data_source('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during read: %s" %dp_obj)

        # update a data source (tests read also)
        print 'Updating data source'
        # first get the existing data source object
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass

        # now tweak the object
        datasource_obj.description = 'the very first data source'
        # now write the dp back to the registry
        try:
            update_result = self.client.update_data_source(datasource_obj)
        except NotFound as ex:
            self.fail("existing data source was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data source update")
        #else:
        #    self.assertTrue(update_result == True)
        # now get the data source back to see if it was updated
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass
        self.assertTrue(datasource_obj.description == 'the very first data source')


        # now 'delete' the data source
        print "deleting data source"
        try:
            delete_result = self.client.force_delete_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during delete")
        #self.assertTrue(delete_result == True)
        # now try to get the deleted dp object
        try:
            dp_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data source was found during read")

        # now try to delete the already deleted data source object
        print "deleting non-existing data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during delete")


    #@unittest.skip('Not done yet.')
    def test_register_instrument(self):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id


        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering the data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument data producer: %s" %ex)


    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial external dataset to register
        ext_dataset_obj = IonObject(RT.ExternalDataset, name='DataSet1',description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering an external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister external data set: %s" %ex)





    #@unittest.skip('not ready')
    def test_eoi_resources(self):

            #
            # test creating a new data provider
            #
            print 'Creating new external_data_provider'
            dataprovider_obj = IonObject(RT.ExternalDataProvider,
                               name='ExtDataProvider1',
                               description='external data provider ')
            try:
                dataprovider_id = self.client.create_external_data_provider(dataprovider_obj)
            except BadRequest as ex:
                self.fail("failed to create new data provider: %s" %ex)
            print 'new data provider id = ', dataprovider_id

            #
            # test creating a new data source
            #
            print 'Creating new data source'
            datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='data source ',
                               type='DAP')
            try:
                datasource_id = self.client.create_data_source(datasource_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source: %s" %ex)
            print 'new data source id = ', datasource_id

            #
            # test creating a new data source model
            #
            print 'Creating new data source model'
            datamodel_obj = IonObject(RT.DataSourceModel,
                               name='DataSourceModel1',
                               description='data source model')
            try:
                datamodel_id = self.client.create_data_source_model(datamodel_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source model: %s" %ex)
            print 'new data source model id = ', datamodel_id


            #
            # test creating a new external data set
            #
            print 'Creating new external data set'
            dataset_obj = IonObject(RT.ExternalDataset,
                               name='ExternalDataSet1',
                               description='external data set ')
            try:
                extdataset_id = self.client.create_external_dataset(dataset_obj)
            except BadRequest as ex:
                self.fail("failed to create new external data set: %s" %ex)
            print 'new external data set id = ', extdataset_id


            #
            # test creating a new external dataset agent
            #
            print 'Creating new external dataset agent'
            datasetagent_obj = IonObject(RT.ExternalDatasetAgent,
                               name='ExternalDatasetAgent1',
                               description='external dataset agent',
                               handler_module='module_name',
                               handler_class='class_name')
            try:
                datasetagent_id = self.client.create_external_dataset_agent(datasetagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent: %s" %ex)
            print 'new external dataset agent id = ', datasetagent_id


            #
            # test creating a new data source agent
            #
            print 'Creating new data source agent'
            datasourceagent_obj = IonObject(RT.DataSourceAgent,
                               name='DataSourceAgent1',
                               description='DataSource agent')
            try:
                datasource_agent_id = self.client.create_data_source_agent(datasourceagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source agent: %s" %ex)
            print 'new data source agent id = ', datasource_agent_id





            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external dataset agent instance'
            datasetagentinstance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                               name='ExternalDatasetAgentInstance1',
                               description='external dataset agent instance ')
            try:
                datasetagentinstance_id = self.client.create_external_dataset_agent_instance(datasetagentinstance_obj, datasetagent_id)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent instance: %s" %ex)
            print 'new external dataset agent instance id = ', datasetagentinstance_id

            #
            # test creating a new datasource agent instance
            #
            print 'Creating new data source agent instance'
            datasourceagentinstance_obj = IonObject(RT.DataSourceAgentInstance,
                               name='ExternalDataSourceAgentInstance1',
                               description='external DataSource agent instance')
            try:
                datasource_agent_instance_id = self.client.create_data_source_agent_instance(datasourceagentinstance_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source agent instance: %s" %ex)
            print 'new data source agent instance id = ', datasource_agent_instance_id

            #
            # test unassign
            #

            self.client.unassign_data_source_from_external_data_provider(datasource_id, dataprovider_id)

            self.client.unassign_data_source_from_data_model(datasource_id, datamodel_id)

            self.client.unassign_external_dataset_from_data_source(extdataset_id, datasource_id)

            #
            # test read
            #

            try:
                dp_obj = self.client.read_external_data_provider(dataprovider_id)
            except NotFound as ex:
                self.fail("existing data provicer was not found during read")
            else:
                pass

            try:
                dp_obj = self.client.read_data_source(datasource_id)
            except NotFound as ex:
                self.fail("existing data source was not found during read")
            else:
                pass

            #
            # test delete
            #
            try:
                self.client.delete_external_data_provider(dataprovider_id)
                self.client.delete_data_source(datasource_id)
                self.client.delete_external_dataset(extdataset_id)
                self.client.delete_data_source_model(datamodel_id)
                self.client.delete_external_dataset_agent(datasetagent_id)
                self.client.delete_data_source_agent(datasource_agent_id)

                self.client.force_delete_external_data_provider(dataprovider_id)
                self.client.force_delete_data_source(datasource_id)
                self.client.force_delete_external_dataset(extdataset_id)
                self.client.force_delete_data_source_model(datamodel_id)
                self.client.force_delete_external_dataset_agent(datasetagent_id)
                self.client.force_delete_data_source_agent(datasource_agent_id)
            except NotFound as ex:
                self.fail("existing resource was not found during delete: %s" %ex)


            # test reading a non-existent data provider
            print 'reading non-existent data provider'
            try:
                bad_obj = self.client.read_external_data_provider('some_fake_id')
            except NotFound as ex:
                pass
            else:
                self.fail("non-existing data provider was found during read: %s" %bad_obj)

class BulkIngestBase(object):
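    """
    Template base class for bulk-ingest integration tests: test_data_ingest
    drives the common flow, while concrete children supply the dataset-specific
    pieces via the NotImplementedError hooks below (see the sketch after them).
    """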

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.pubsub_management    = PubsubManagementServiceClient()
        self.dataset_management   = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.data_acquisition_management = DataAcquisitionManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.process_dispatch_client = ProcessDispatcherServiceClient(node=self.container.node)
        self.resource_registry       = self.container.resource_registry
        self.context_ids = self.build_param_contexts()
        self.setup_resources()

    def build_param_contexts(self):
        raise NotImplementedError('build_param_contexts must be implemented in child classes')

    def create_external_dataset(self):
        raise NotImplementedError('create_external_dataset must be implemented in child classes')

    def get_dvr_config(self):
        raise NotImplementedError('get_dvr_config must be implemented in child classes')

    def get_retrieve_client(self, dataset_id=''):
        raise NotImplementedError('get_retrieve_client must be implemented in child classes')
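
    # Illustrative only: a minimal sketch of a concrete child class, assuming
    # the coverage-model ParameterContext/QuantityType API. Every name below
    # (TestHypotheticalIngest, the EDA_* values, the handler module/class) is
    # hypothetical, not something defined in this code base.
    #
    #   class TestHypotheticalIngest(BulkIngestBase, IonIntegrationTestCase):
    #       def setup_resources(self):
    #           self.name = 'hypothetical_dataset'
    #           self.description = 'sample bulk-ingest dataset'
    #           self.EDA_NAME = 'ExampleDatasetAgent'
    #           self.EDA_MOD = 'hypothetical.agent.module'
    #           self.EDA_CLS = 'HypotheticalDatasetAgent'
    #
    #       def build_param_contexts(self):
    #           t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.dtype('float64')))
    #           t_ctxt.uom = 'seconds since 01-01-1970'
    #           ctxt_id = self.dataset_management.create_parameter_context(name='time', parameter_context=t_ctxt.dump())
    #           return [ctxt_id]
    #
    #       def create_external_dataset(self):
    #           dset = ExternalDataset(name=self.name, dataset_description=DatasetDescription(),
    #                                  update_description=UpdateDescription(), contact=ContactInformation())
    #           return self.data_acquisition_management.create_external_dataset(external_dataset=dset)
    #
    #       def get_dvr_config(self):
    #           return {'dvr_mod': 'hypothetical.handler.module', 'dvr_cls': 'HypotheticalDataHandler',
    #                   'dh_cfg': {'TESTING': True}}
    #
    #       def get_retrieve_client(self, dataset_id=''):
    #           granule = self.data_retriever.retrieve(dataset_id)
    #           RecordDictionaryTool.load_from_granule(granule)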

    def test_data_ingest(self):
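        # End-to-end flow: parameter dictionary -> stream definition ->
        # persisted data product -> dataset id / stream route lookups ->
        # external dataset creation and registration -> dataset agent startup.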
        self.pdict_id = self.create_parameter_dict(self.name)
        self.stream_def_id = self.create_stream_def(self.name, self.pdict_id)
        self.data_product_id = self.create_data_product(self.name, self.description, self.stream_def_id)
        self.dataset_id = self.get_dataset_id(self.data_product_id)
        self.stream_id, self.route = self.get_stream_id_and_route(self.data_product_id)
        self.external_dataset_id = self.create_external_dataset()
        self.data_producer_id = self.register_external_dataset(self.external_dataset_id)
        self.start_agent()

    def create_parameter_dict(self, name=''):
        return self.dataset_management.create_parameter_dictionary(name=name, parameter_context_ids=self.context_ids, temporal_context='time')

    def create_stream_def(self, name='', pdict_id=''):
        return self.pubsub_management.create_stream_definition(name=name, parameter_dictionary_id=pdict_id)

    def create_data_product(self, name='', description='', stream_def_id=''):
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = DataProduct(
            name=name,
            description=description,
            processing_level_code='Parsed_Canonical',
            temporal_domain=tdom,
            spatial_domain=sdom)

        data_product_id = self.data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
        self.data_product_management.activate_data_product_persistence(data_product_id)
        return data_product_id

    def register_external_dataset(self, external_dataset_id=''):
        return self.data_acquisition_management.register_external_data_set(external_dataset_id=external_dataset_id)

    def get_dataset_id(self, data_product_id=''):
        dataset_ids, assocs = self.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasDataset, id_only=True)
        return dataset_ids[0]

    def get_stream_id_and_route(self, data_product_id):
        stream_ids, _ = self.resource_registry.find_objects(data_product_id, PRED.hasStream, RT.Stream, id_only=True)
        stream_id = stream_ids[0]
        route = self.pubsub_management.read_stream_route(stream_id)
        #self.create_logger(self.name, stream_id)
        return stream_id, route

    def start_agent(self):
        agent_config = {
            'driver_config': self.get_dvr_config(),
            'stream_config': {},
            'agent': {'resource_id': self.external_dataset_id},
            'test_mode': True
        }

        _ia_pid = self.container.spawn_process(
            name=self.EDA_NAME,
            module=self.EDA_MOD,
            cls=self.EDA_CLS,
            config=agent_config)

        self._ia_client = ResourceAgentClient(self.external_dataset_id, process=FakeProcess())

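        # Walk the agent through its lifecycle (UNINITIALIZED -> INACTIVE ->
        # IDLE -> running), then start autosampling on the resource.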
        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command=DriverEvent.START_AUTOSAMPLE)
        self._ia_client.execute_resource(command=cmd)

        self.start_listener(self.dataset_id)

    def stop_agent(self):
        cmd = AgentCommand(command=DriverEvent.STOP_AUTOSAMPLE)
        self._ia_client.execute_resource(cmd)

        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        self._ia_client.execute_agent(cmd)

    def start_listener(self, dataset_id=''):
        dataset_modified = Event()
        #callback to use retrieve to get data from the coverage
        def cb(*args, **kwargs):
            self.get_retrieve_client(dataset_id=dataset_id)

        #callback to keep execution going once dataset has been fully ingested
        def cb2(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id)
        es.start()

        es2 = EventSubscriber(event_type=OT.DeviceCommonLifecycleEvent, callback=cb2, origin='BaseDataHandler._acquire_sample')
        es2.start()

        self.addCleanup(es.stop)
        self.addCleanup(es2.stop)

        #let it go for up to 120 seconds, then stop the agent and reset it
        dataset_modified.wait(120)
        self.stop_agent()

    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name+'_logger')
        producer_definition.executable = {
            'module':'ion.processes.data.stream_granule_logger',
            'class':'StreamGranuleLogger'
        }

        logger_procdef_id = self.process_dispatch_client.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process':{
                'stream_id':stream_id,
                }
        }
        pid = self.process_dispatch_client.schedule_process(process_definition_id=logger_procdef_id, configuration=configuration)

        return pid
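
    # Usage sketch (hypothetical): wire the logger to the ingest stream and
    # ensure it is torn down with the test, e.g.
    #   pid = self.create_logger(self.name, stream_id=self.stream_id)
    #   self.addCleanup(self.process_dispatch_client.cancel_process, pid)
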
class TestIntExternalObservatoryAgent(IonIntegrationTestCase):
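    """
    Spawns an ExternalObservatoryAgent worker against an NCOM test dataset
    (see _setup_ncom) and exercises its capability and command interfaces
    through a ResourceAgentClient.
    """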

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml')

        self.dams_cli = DataAcquisitionManagementServiceClient()
        self.dpms_cli = DataProductManagementServiceClient()

        eda = ExternalDatasetAgent()
        self.eda_id = self.dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        self.eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=self.eda_id)


        self._setup_ncom()
        proc_name = self.ncom_ds_id + '_worker'
        config = {'process': {'name': proc_name,
                              'type': 'agent',
                              'eoa': {'dataset_id': self.ncom_ds_id}}}
        pid = self.container.spawn_process(name=proc_name, module='ion.agents.eoi.external_observatory_agent', cls='ExternalObservatoryAgent', config=config)

        queue_id = "%s.%s" % (self.container.id, pid)

        log.debug("Spawned worker process ==> proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        self._agent_cli = ResourceAgentClient(self.ncom_ds_id, name=pid, process=FakeProcess())
        log.debug("Got a ResourceAgentClient: res_id=%s" % self._agent_cli.resource_id)

    def _setup_ncom(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        #        dprov.institution.name = "OOI CGSN"
        dprov.contact.name = "Robert Weller"
        dprov.contact.email = "*****@*****.**"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
        #        dsrc.connection_params["base_data_url"] = "http://ooi.whoi.edu/thredds/dodsC/"
        dsrc.connection_params["base_data_url"] = ""
        dsrc.contact.name="Rich Signell"
        dsrc.contact.email = "*****@*****.**"

        # Create ExternalDataset
        dset = ExternalDataset(name="test", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        #        dset.dataset_description.parameters["dataset_path"] = "ooi/AS02CPSM_R_M.nc"
        dset.dataset_description.parameters["dataset_path"] = "test_data/ncom.nc"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "ion.agents.eoi.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        ds_id = self.ncom_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = self.dams_cli.register_external_data_set(external_dataset_id=ds_id)

        ## Associate everything
        # Convenience method
#        self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # Or using each method
        self.dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        self.dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        self.dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        self.dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=self.eda_inst_id)
#        self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='ncom_product', description='raw ncom product')
        dproduct_id = self.dpms_cli.create_data_product(data_product=dprod)

        self.dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)


########## Tests ##########

#    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_get_capabilities(self):
        # Get all the capabilities
        caps = self._agent_cli.get_capabilities()
        log.debug("all capabilities: %s" % caps)
        expected = [['RES_CMD', 'acquire_data'], ['RES_CMD', 'acquire_data_by_request'],
            ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'], ['RES_CMD', 'compare'],
            ['RES_CMD', 'get_attributes'], ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'],
            ['RES_CMD', 'has_new_data']]
        self.assertEquals(caps, expected)

        caps = self._agent_cli.get_capabilities(capability_types=['RES_CMD'])
        log.debug("resource commands: %s" % caps)
        self.assertEquals(caps, expected)

        caps = self._agent_cli.get_capabilities(capability_types=['RES_PAR'])
        log.debug("resource parameters: %s" % caps)
        self.assertEqual(type(caps), list)

        caps = self._agent_cli.get_capabilities(capability_types=['AGT_CMD'])
        log.debug("agent commands: %s" % caps)
        self.assertEqual(type(caps), list)

        caps = self._agent_cli.get_capabilities(capability_types=['AGT_PAR'])
        log.debug("agent parameters: %s" % caps)
        self.assertEqual(type(caps), list)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_get_attrs(self):
        cmd = AgentCommand(command_id="111", command="get_attributes")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._agent_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(handler_module=self.DVR_CONFIG['dvr_mod'],
            handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'slocum_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
        dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
        dset.dataset_description.parameters['date_pattern'] = '%Y %j'
        dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
            'c_wpt_y_lmc',
            'sci_water_cond',
            'm_y_lmc',
            'u_hd_fin_ap_inflection_holdoff',
            'sci_m_present_time',
            'm_leakdetect_voltage_forward',
            'sci_bb3slo_b660_scaled',
            'c_science_send_all',
            'm_gps_status',
            'm_water_vx',
            'm_water_vy',
            'c_heading',
            'sci_fl3slo_chlor_units',
            'u_hd_fin_ap_gain',
            'm_vacuum',
            'u_min_water_depth',
            'm_gps_lat',
            'm_veh_temp',
            'f_fin_offset',
            'u_hd_fin_ap_hardover_holdoff',
            'c_alt_time',
            'm_present_time',
            'm_heading',
            'sci_bb3slo_b532_scaled',
            'sci_fl3slo_cdom_units',
            'm_fin',
            'x_cycle_overrun_in_ms',
            'sci_water_pressure',
            'u_hd_fin_ap_igain',
            'sci_fl3slo_phyco_units',
            'm_battpos',
            'sci_bb3slo_b470_scaled',
            'm_lat',
            'm_gps_lon',
            'sci_ctd41cp_timestamp',
            'm_pressure',
            'c_wpt_x_lmc',
            'c_ballast_pumped',
            'x_lmc_xy_source',
            'm_lon',
            'm_avg_speed',
            'sci_water_temp',
            'u_pitch_ap_gain',
            'm_roll',
            'm_tot_num_inflections',
            'm_x_lmc',
            'u_pitch_ap_deadband',
            'm_final_water_vy',
            'm_final_water_vx',
            'm_water_depth',
            'm_leakdetect_voltage',
            'u_pitch_max_delta_battpos',
            'm_coulomb_amphr',
            'm_pitch', ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='slocum_model')
        #        dsrc_model.model = 'SLOCUM'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        #create temp streamdef so the data product can create the stream
        pc_list = []
        for pc_k, pc in self._create_parameter_dictionary().iteritems():
            pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('slocum_param_dict', pc_list)

        streamdef_id = pubsub_cli.create_stream_definition(name="slocum_stream_def", description="stream def for slocum testing", parameter_dictionary_id=pdict_id)

        #        dpms_cli.create_data_product()

        # Generate the data product and associate it to the ExternalDataset

        tdom, sdom = time_series_domain()
        tdom, sdom = tdom.dump(), sdom.dump()

        dprod = IonObject(RT.DataProduct,
            name='slocum_parsed_product',
            description='parsed slocum product',
            temporal_domain=tdom,
            spatial_domain=sdom)

        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
            stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_ids, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_ids[0]

        log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='slocum', stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': streamdef_id,
            'external_dataset_res': dset,
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 20,
            }
class TestExternalDatasetAgentMgmt(IonIntegrationTestCase):
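    """
    Exercises external dataset agent management: model/agent/instance CRUD,
    data product assignment, agent instance start/stop, and the agent command
    state machine.
    """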

    # DataHandler config
    DVR_CONFIG = {"dvr_mod": "ion.agents.data.handlers.base_data_handler", "dvr_cls": "DummyDataHandler"}

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2deploy.yml")

        log.debug("TestExternalDatasetAgentMgmt: started services")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)

    @unittest.skip("not working; fix activate_data_product_persistence()")
    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datasetModel_obj = IonObject(
            RT.ExternalDatasetModel,
            name="ExampleDatasetModel",
            description="ExampleDatasetModel",
            datset_type="FibSeries",  # (sic) attribute name as defined on the resource
        )
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(datasetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" % ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id))

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(
            RT.ExternalDatasetAgent,
            name="datasetagent007",
            description="datasetagent007",
            handler_module=EDA_MOD,
            handler_class=EDA_CLS,
        )
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" % ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id))

        # Create ExternalDataset
        log.debug("TestExternalDatasetAgentMgmt: Create external dataset resource ")
        extDataset_obj = IonObject(RT.ExternalDataset, name="ExtDataset", description="ExtDataset")
        try:
            extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" % ex)

        log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ", str(extDataset_id))

        # register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(extDataset_id)

        # create a stream definition for the data from the ctd simulator

        pdict_id = self.datasetclient.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name="SBE37_CDM", parameter_dictionary_id=pdict_id)

        log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id))

        log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition")
        dp_obj = IonObject(RT.DataProduct, name="eoi dataset data", description=" stream test")

        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(
            RT.DataProduct, name="DP1", description="some new dp", temporal_domain=tdom, spatial_domain=sdom
        )

        data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)

        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1))

        self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1)

        # todo fix the problem here....
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids))
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set("data", "external_data")

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG["dh_cfg"] = {
            "TESTING": True,
            "stream_id": stream_id,  # TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
            "data_producer_id": dproducer_id,
            #            'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id
            "taxonomy": tx.dump(),  # TODO: Currently does not support sets
            "max_records": 4,
        }

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            "driver_config": self.DVR_CONFIG,
            "stream_config": self._stream_config,
            "agent": {"resource_id": EDA_RESOURCE_ID},
            "test_mode": True,
        }

        extDatasetAgentInstance_obj = IonObject(
            RT.ExternalDatasetAgentInstance,
            name="DatasetAgentInstance",
            description="DatasetAgentInstance",
            dataset_driver_config=self.DVR_CONFIG,
            dataset_agent_config=agent_config,
        )
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(
            external_dataset_agent_instance=extDatasetAgentInstance_obj,
            external_dataset_agent_id=datasetAgent_id,
            external_dataset_id=extDataset_id,
        )
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj))
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id))

        # Check that the instance is currently not active
        inst_id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ", str(inst_id), str(active)
        )

        self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id)

        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj))

        # now the instance process should be active
        inst_id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ", str(inst_id), str(active)
        )

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id, process=FakeProcess())
        print "TestExternalDatasetAgentMgmt: got ia client %s", self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset client %s", str(self._dsa_client))

        #        cmd=AgentCommand(command='initialize')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='go_active')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='run')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        log.info('Send an unconstrained request for data (\'new data\')')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        cmd = AgentCommand(command='reset')
        #        _ = self._dsa_client.execute_agent(cmd)
        #        cmd = AgentCommand(command='get_current_state')
        #        retval = self._dsa_client.execute_agent(cmd)
        #        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
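        # Expected walk: UNINITIALIZED -> INACTIVE -> IDLE -> OBSERVATORY,
        # exercising pause/resume (STOPPED) and clear (back to IDLE) along
        # the way, and ending with a reset back to UNINITIALIZED.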
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command="initialize")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command="go_active")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="pause")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command="resume")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="clear")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="pause")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command="clear")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="reset")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        # -------------------------------
        # Deactivate InstrumentAgentInstance
        # -------------------------------
        self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)

    def test_dataset_agent_prepare_support(self):

        eda_sup = self.damsclient.prepare_external_dataset_agent_support()

        eda_obj = IonObject(RT.ExternalDatasetAgent, name="ExternalDatasetAgent")
        eda_id = self.damsclient.create_external_dataset_agent(eda_obj)

        eda_sup = self.damsclient.prepare_external_dataset_agent_support(external_dataset_agent_id=eda_id)

        edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support()

        edai_obj = IonObject(RT.ExternalDatasetAgentInstance, name="ExternalDatasetAgentInstance")
        edai_id = self.damsclient.create_external_dataset_agent_instance(edai_obj)

        edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support(
            external_dataset_agent_instance_id=edai_id
        )
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(name='example eda',handler_module=self.DVR_CONFIG['dvr_mod'],
            handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance(name='example eda instance')
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst,
            external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(name='example data provider', institution=Institution(),
            contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(name='example datasource', protocol_type='DAP', institution=Institution(),
            contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'usgs_test_dataset'
        dset = ExternalDataset(name=ds_name,
            dataset_description=DatasetDescription(),
            update_description=UpdateDescription(),
            contact=ContactInformation())

        # The usgs.nc test dataset is a download of the R1 dataset found here:
        # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
        dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
        dset.dataset_description.parameters['temporal_dimension'] = 'time'
        dset.dataset_description.parameters['zonal_dimension'] = 'lon'
        dset.dataset_description.parameters['meridional_dimension'] = 'lat'
        dset.dataset_description.parameters['vertical_dimension'] = 'z'
        dset.dataset_description.parameters['variables'] = [
            'water_temperature',
            'streamflow',
            'water_temperature_bottom',
            'water_temperature_middle',
            'specific_conductance',
            'data_qualifier', ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='dap_model')
        #dsrc_model.model = 'DAP'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        #create temp streamdef so the data product can create the stream
        pc_list = []

        #Get 'time' parameter context
        pc_list.append(dms_cli.read_parameter_context_by_name('time', id_only=True))

        for pc_k, pc in self._create_parameter_dictionary().iteritems():
            pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('netcdf_param_dict', pc_list)

        #create temp streamdef so the data product can create the stream
        streamdef_id = pubsub_cli.create_stream_definition(name="netcdf", description="netcdf", parameter_dictionary_id=pdict_id)

        tdom, sdom = time_series_domain()
        tdom, sdom = tdom.dump(), sdom.dump()

        dprod = IonObject(RT.DataProduct,
            name='usgs_parsed_product',
            description='parsed usgs product',
            temporal_domain=tdom,
            spatial_domain=sdom)

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
            stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_ids, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_ids[0]

        log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='usgs', stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': streamdef_id,
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 1,
            }
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS'

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(name='example eda',handler_module=self.DVR_CONFIG['dvr_mod'],
            handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance(name='example eda instance')
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst,
            external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(name='example data provider', institution=Institution(),
            contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(name='example datasource', protocol_type='DAP', institution=Institution(),
            contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'usgs_test_dataset'
        dset = ExternalDataset(name=ds_name,
            dataset_description=DatasetDescription(),
            update_description=UpdateDescription(),
            contact=ContactInformation())

        # The usgs.nc test dataset is a download of the R1 dataset found here:
        # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
        dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
        dset.dataset_description.parameters['temporal_dimension'] = 'time'
        dset.dataset_description.parameters['zonal_dimension'] = 'lon'
        dset.dataset_description.parameters['meridional_dimension'] = 'lat'
        dset.dataset_description.parameters['vertical_dimension'] = 'z'
        dset.dataset_description.parameters['variables'] = [
            'water_temperature',
            'streamflow',
            'water_temperature_bottom',
            'water_temperature_middle',
            'specific_conductance',
            'data_qualifier', ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='dap_model')
        #dsrc_model.model = 'DAP'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Associate the resources using the individual assignment methods
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # Build parameter contexts for the temporary stream definition
        pc_list = []

        #Get 'time' parameter context
        pc_list.append(dms_cli.read_parameter_context_by_name('time', id_only=True))

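        # _create_parameter_dictionary() appears to yield name -> pair mappings in which
        # element [1] is the ParameterContext object (inferred from the dump() call below).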
        for pc_k, pc in self._create_parameter_dictionary().iteritems():
            pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('netcdf_param_dict', pc_list)

        # Create the temporary stream definition from the parameter dictionary
        streamdef_id = pubsub_cli.create_stream_definition(name="netcdf", description="netcdf", parameter_dictionary_id=pdict_id)

        tdom, sdom = time_series_domain()
        tdom, sdom = tdom.dump(), sdom.dump()

        dprod = IonObject(RT.DataProduct,
            name='usgs_parsed_product',
            description='parsed usgs product',
            temporal_domain=tdom,
            spatial_domain=sdom)

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
            stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

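        # create_data_product associated a Stream with the product; retrieve its id
        # via the hasStream association.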
        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='usgs', stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': streamdef_id,
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 1,
            }


class TestIntExternalObservatoryAgentService(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()
#        self.container.start_rel_from_url(rel_url='res/deploy/r2deploy.yml')
        self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml')

        self.eoas_cli = ExternalObservatoryAgentServiceClient()
        self.rr_cli = ResourceRegistryServiceClient()
        self.pubsub_cli = PubsubManagementServiceClient()
        self.dams_cli = DataAcquisitionManagementServiceClient()

        self._setup_ncom()
        self._setup_hfr()

    def _setup_ncom(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
#        dprov.institution.name = "OOI CGSN"
        dprov.contact.name = "Robert Weller"
        dprov.contact.email = "*****@*****.**"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
        #        dsrc.connection_params["base_data_url"] = "http://ooi.whoi.edu/thredds/dodsC/"
        dsrc.connection_params["base_data_url"] = ""
        dsrc.contact.name="Rich Signell"
        dsrc.contact.email = "*****@*****.**"

        # Create ExternalDataset
        dset = ExternalDataset(name="test", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        #        dset.dataset_description.parameters["dataset_path"] = "ooi/AS02CPSM_R_M.nc"
        dset.dataset_description.parameters["dataset_path"] = "test_data/ncom.nc"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create ExternalDataSourceModel
        dsrc_model = ExternalDataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        #TODO: Uncomment when CRUD methods in DAMS are implemented
        #        self.ncom_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
        #        ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
        #        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)

        self.ncom_ds_id, _ = self.rr_cli.create(dset)
        ext_dprov_id, _ = self.rr_cli.create(dprov)
        ext_dsrc_id, _ = self.rr_cli.create(dsrc)
        #TODO: this needs to be added to DAMS
        ext_dsrc_model_id, _ = self.rr_cli.create(dsrc_model)

        ## Associate everything
        self.rr_cli.create_association(self.ncom_ds_id, PRED.hasSource, ext_dsrc_id)
        log.debug("Associated ExternalDataset %s with DataSource %s" % (self.ncom_ds_id, ext_dsrc_id))
        self.rr_cli.create_association(ext_dsrc_id, PRED.hasProvider, ext_dprov_id)
        log.debug("Associated DataSource %s with ExternalDataProvider %s" % (ext_dsrc_id, ext_dprov_id))
        self.rr_cli.create_association(ext_dsrc_id, PRED.hasModel, ext_dsrc_model_id)
        log.debug("Associated DataSource %s with ExternalDataSourceModel %s" % (ext_dsrc_id, ext_dsrc_model_id))
        data_prod_id = self.dams_cli.register_external_data_set(self.ncom_ds_id)
        log.debug("Registered ExternalDataset {%s}: DataProducer ID = %s" % (self.ncom_ds_id, data_prod_id))

    def _setup_hfr(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
#        dprov.institution.name = "HFR UCSD"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
        dsrc.connection_params["base_data_url"] = "http://hfrnet.ucsd.edu:8080/thredds/dodsC/"

        # Create ExternalDataset
        dset = ExternalDataset(name="UCSD HFR", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
        dset.dataset_description.parameters["dataset_path"] = "HFRNet/USEGC/6km/hourly/RTV"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create ExternalDataSourceModel
        dsrc_model = ExternalDataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        #TODO: Uncomment when CRUD methods in DAMS are implemented
        #        self.ncom_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
        #        ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
        #        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)

        self.hfr_ds_id, _ = self.rr_cli.create(dset)
        ext_dprov_id, _ = self.rr_cli.create(dprov)
        ext_dsrc_id, _ = self.rr_cli.create(dsrc)
        #TODO: this needs to be added to DAMS
        ext_dsrc_model_id, _ = self.rr_cli.create(dsrc_model)

        self.rr_cli.create_association(self.hfr_ds_id, PRED.hasSource, ext_dsrc_id)
        log.debug("Associated ExternalDataset %s with DataSource %s" % (self.hfr_ds_id, ext_dsrc_id))
        self.rr_cli.create_association(ext_dsrc_id, PRED.hasProvider, ext_dprov_id)
        log.debug("Associated DataSource %s with ExternalDataProvider %s" % (ext_dsrc_id, ext_dprov_id))
        self.rr_cli.create_association(ext_dsrc_id, PRED.hasModel, ext_dsrc_model_id)
        log.debug("Associated DataSource %s with ExternalDataSourceModel %s" % (ext_dsrc_id, ext_dsrc_model_id))
        data_prod_id = self.dams_cli.register_external_data_set(self.hfr_ds_id)
        log.debug("Registered ExternalDataset {%s}: DataProducer ID = %s" % (self.hfr_ds_id, data_prod_id))



########## Tests ##########

#    @unittest.skip("")
    def test_spawn_worker(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        self.assertEquals(proc_name, self.ncom_ds_id+'_worker')

    @unittest.skip("Underlying method not yet implemented")
    def test_get_worker(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        with self.assertRaises(IonException):
            self.eoas_cli.get_worker(resource_id=self.ncom_ds_id)

    @unittest.skip("Underlying method not yet implemented")
    def test_get_capabilities(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        with self.assertRaises(IonException):
            self.eoas_cli.get_capabilities()

#    @unittest.skip("")
    def test_execute_single_worker(self):
        ds_id = self.ncom_ds_id

        log.debug("test_spawn_worker with ds_id: %s" % ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

#        with self.assertRaises(IonException):
#            self.eoas_cli.execute()

        cmd = AgentCommand(command_id="111", command="get_attributes", kwargs={"ds_id":ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")
        self.assertIsInstance(ret.result[0], dict)

        cmd = AgentCommand(command_id="112", command="get_signature", kwargs={"ds_id":ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")
        self.assertIsInstance(ret.result[0], dict)

#    @unittest.skip("")
    def test_execute_multi_worker(self):
        log.debug("test_spawn_worker #1 with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        log.debug("test_spawn_worker #2 with ds_id: %s" % self.hfr_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.hfr_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        cmd = AgentCommand(command_id="112", command="get_signature", kwargs={"ds_id":self.ncom_ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")
        self.assertIsInstance(ret.result[0], dict)

        cmd = AgentCommand(command_id="112", command="get_signature", kwargs={"ds_id":self.hfr_ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")
        self.assertIsInstance(ret.result[0], dict)

#    @unittest.skip("")
    def test_execute_acquire_data(self):
        ds_id = self.ncom_ds_id

        log.debug("test_spawn_worker with ds_id: %s" % ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        cmd = AgentCommand(command_id="113", command="acquire_data", kwargs={"ds_id":ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")

    @unittest.skip("Underlying method not yet implemented")
    def test_set_param(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        with self.assertRaises(IonException):
            self.eoas_cli.set_param(name="param", value="value")

    @unittest.skip("Underlying method not yet implemented")
    def test_get_param(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        with self.assertRaises(IonException):
            self.eoas_cli.get_param(name="param")

    @unittest.skip("Underlying method not yet implemented")
    def test_execute_agent(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        with self.assertRaises(IonException):
            self.eoas_cli.execute_agent()

    @unittest.skip("Underlying method not yet implemented")
    def test_set_agent_param(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        with self.assertRaises(IonException):
            self.eoas_cli.set_agent_param(name="param", value="value")

    @unittest.skip("Underlying method not yet implemented")
    def test_get_agent_param(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        with self.assertRaises(IonException):
            self.eoas_cli.get_agent_param(name="param")
Example #9

class TestIntExternalObservatoryAgent(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml')

        self.dams_cli = DataAcquisitionManagementServiceClient()
        self.dpms_cli = DataProductManagementServiceClient()

        eda = ExternalDatasetAgent()
        self.eda_id = self.dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        self.eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(
            eda_inst, external_dataset_agent_id=self.eda_id)

        self._setup_ncom()
        proc_name = self.ncom_ds_id + '_worker'
        config = {}
        config['process'] = {'name': proc_name, 'type': 'agent'}
        config['process']['eoa'] = {'dataset_id': self.ncom_ds_id}
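        # The 'eoa' section presumably hands the dataset id to the spawned
        # ExternalObservatoryAgent; 'process' carries the standard spawn settings.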
        pid = self.container.spawn_process(
            name=proc_name,
            module='eoi.agent.external_observatory_agent',
            cls='ExternalObservatoryAgent',
            config=config)

        queue_id = "%s.%s" % (self.container.id, pid)

        log.debug(
            "Spawned worker process ==> proc_name: %s\tproc_id: %s\tqueue_id: %s"
            % (proc_name, pid, queue_id))

        self._agent_cli = ResourceAgentClient(self.ncom_ds_id,
                                              name=pid,
                                              process=FakeProcess())
        log.debug("Got a ResourceAgentClient: res_id=%s" %
                  self._agent_cli.resource_id)

    def _setup_ncom(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(),
                                     contact=ContactInformation())
        #        dprov.institution.name = "OOI CGSN"
        dprov.contact.name = "Robert Weller"
        dprov.contact.email = "*****@*****.**"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP",
                          institution=Institution(),
                          contact=ContactInformation())
        #        dsrc.connection_params["base_data_url"] = "http://ooi.whoi.edu/thredds/dodsC/"
        dsrc.connection_params["base_data_url"] = ""
        dsrc.contact.name = "Rich Signell"
        dsrc.contact.email = "*****@*****.**"

        # Create ExternalDataset
        dset = ExternalDataset(name="test",
                               dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(),
                               contact=ContactInformation())

        #        dset.dataset_description.parameters["dataset_path"] = "ooi/AS02CPSM_R_M.nc"
        dset.dataset_description.parameters[
            "dataset_path"] = "test_data/ncom.nc"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        ds_id = self.ncom_ds_id = self.dams_cli.create_external_dataset(
            external_dataset=dset)
        ext_dprov_id = self.dams_cli.create_external_data_provider(
            external_data_provider=dprov)
        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = self.dams_cli.register_external_data_set(
            external_dataset_id=ds_id)

        ## Associate everything
        # Convenience method
        #        self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # Or using each method
        self.dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        self.dams_cli.assign_data_source_to_data_model(
            data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        self.dams_cli.assign_external_dataset_to_data_source(
            external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        self.dams_cli.assign_external_dataset_to_agent_instance(
            external_dataset_id=ds_id, agent_instance_id=self.eda_inst_id)
        #        self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='ncom_product',
                            description='raw ncom product')
        dproduct_id = self.dpms_cli.create_data_product(data_product=dprod)

        self.dams_cli.assign_data_product(input_resource_id=ds_id,
                                          data_product_id=dproduct_id,
                                          create_stream=True)

########## Tests ##########

#    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_get_capabilities(self):
        # Get all the capabilities
        caps = self._agent_cli.get_capabilities()
        log.debug("all capabilities: %s" % caps)
        lst = [['RES_CMD', 'acquire_data'],
               ['RES_CMD', 'acquire_data_by_request'],
               ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'],
               ['RES_CMD', 'compare'], ['RES_CMD', 'get_attributes'],
               ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'],
               ['RES_CMD', 'has_new_data']]
        self.assertEquals(caps, lst)

        caps = self._agent_cli.get_capabilities(capability_types=['RES_CMD'])
        log.debug("resource commands: %s" % caps)
        lst = [['RES_CMD', 'acquire_data'],
               ['RES_CMD', 'acquire_data_by_request'],
               ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'],
               ['RES_CMD', 'compare'], ['RES_CMD', 'get_attributes'],
               ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'],
               ['RES_CMD', 'has_new_data']]
        self.assertEquals(caps, lst)

        caps = self._agent_cli.get_capabilities(capability_types=['RES_PAR'])
        log.debug("resource commands: %s" % caps)
        self.assertEqual(type(caps), list)

        caps = self._agent_cli.get_capabilities(capability_types=['AGT_CMD'])
        log.debug("resource commands: %s" % caps)
        self.assertEqual(type(caps), list)

        caps = self._agent_cli.get_capabilities(capability_types=['AGT_PAR'])
        log.debug("resource commands: %s" % caps)
        self.assertEqual(type(caps), list)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_get_attrs(self):
        cmd = AgentCommand(command_id="111", command="get_attributes")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._agent_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(handler_module=self.DVR_CONFIG['dvr_mod'], handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='DAP', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'dummy_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        # The usgs.nc test dataset is a download of the R1 dataset found here:
        # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
        dset.dataset_description.parameters['base_url'] = 'test_data/dummy'
        dset.dataset_description.parameters['list_pattern'] = 'test*.dum'
        dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H'
        dset.dataset_description.parameters['date_extraction_pattern'] = r'test([\d]{4})-([\d]{2})-([\d]{2})-([\d]{2}).dum'
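        # The four capture groups line up with the '%Y %m %d %H' date_pattern above
        # and with the seed file names used below (e.g. test2012-02-01-12.dum).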
        dset.dataset_description.parameters['temporal_dimension'] = 'time'
        dset.dataset_description.parameters['zonal_dimension'] = 'lon'
        dset.dataset_description.parameters['meridional_dimension'] = 'lat'
        dset.dataset_description.parameters['variables'] = [
            'dummy',
            ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='dap_model')
        #dsrc_model.model = 'DAP'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Associate the resources using the individual assignment methods
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)

        # Build parameter contexts for the temporary stream definition
        pc_list = []

        #Get 'time' parameter context
        pc_list.append(dms_cli.read_parameter_context_by_name('time', id_only=True))
        pc_list.append(dms_cli.read_parameter_context_by_name('dummy', id_only=True))

        for pc_k, pc in self._create_parameter_dictionary().iteritems():
            pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('dummy_param_dict', pc_list)

        streamdef_id = pubsub_cli.create_stream_definition(name="dummy", parameter_dictionary_id=pdict_id, description="dummy")

        tdom, sdom = time_series_domain()
        tdom, sdom = tdom.dump(), sdom.dump()

        dprod = DataProduct(
            name='dummy_dataset',
            description='dummy data product',
            temporal_domain=tdom,
            spatial_domain=sdom)

        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
            stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)  # , create_stream=True)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

        # Create the logger for receiving publications
        _, stream_route, stream_def = self.create_stream_and_logger(name='dummy', stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': stream_def,
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 4,
            }

        folder = 'test_data/dummy'

        if os.path.isdir(folder):
            for the_file in os.listdir(folder):
                file_path = os.path.join(folder, the_file)
                try:
                    if os.path.isfile(file_path):
                        os.unlink(file_path)
                except Exception as e:
                    log.debug('_setup_resources error: {0}'.format(e))

        if not os.path.exists(folder):
            os.makedirs(folder)

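        # Seed the dataset folder with four files matching the list_pattern
        # ('test*.dum') and date_extraction_pattern configured above.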
        self.add_dummy_file('test_data/dummy/test2012-02-01-12.dum')
        self.add_dummy_file('test_data/dummy/test2012-02-01-13.dum')
        self.add_dummy_file('test_data/dummy/test2012-02-01-14.dum')
        self.add_dummy_file('test_data/dummy/test2012-02-01-15.dum')

    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = "Christopher Mueller"
        dprov.contact.email = "*****@*****.**"

        # Create DataSource
        dsrc = DataSource(protocol_type="FILE", institution=Institution(), contact=ContactInformation())
        dsrc.connection_params["base_data_url"] = ""
        dsrc.contact.name = "Tim Giguere"
        dsrc.contact.email = "*****@*****.**"

        # Create ExternalDataset
        ds_name = "slocum_test_dataset"
        dset = ExternalDataset(
            name=ds_name,
            dataset_description=DatasetDescription(),
            update_description=UpdateDescription(),
            contact=ContactInformation(),
        )

        dset.dataset_description.parameters["base_url"] = "test_data/slocum/"
        dset.dataset_description.parameters["list_pattern"] = "ru05-2012-021-0-0-sbd.dat"
        dset.dataset_description.parameters["date_pattern"] = "%Y %j"
        dset.dataset_description.parameters["date_extraction_pattern"] = "ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat"
        dset.dataset_description.parameters["temporal_dimension"] = None
        dset.dataset_description.parameters["zonal_dimension"] = None
        dset.dataset_description.parameters["meridional_dimension"] = None
        dset.dataset_description.parameters["vertical_dimension"] = None
        dset.dataset_description.parameters["variables"] = [
            "c_wpt_y_lmc",
            "sci_water_cond",
            "m_y_lmc",
            "u_hd_fin_ap_inflection_holdoff",
            "sci_m_present_time",
            "m_leakdetect_voltage_forward",
            "sci_bb3slo_b660_scaled",
            "c_science_send_all",
            "m_gps_status",
            "m_water_vx",
            "m_water_vy",
            "c_heading",
            "sci_fl3slo_chlor_units",
            "u_hd_fin_ap_gain",
            "m_vacuum",
            "u_min_water_depth",
            "m_gps_lat",
            "m_veh_temp",
            "f_fin_offset",
            "u_hd_fin_ap_hardover_holdoff",
            "c_alt_time",
            "m_present_time",
            "m_heading",
            "sci_bb3slo_b532_scaled",
            "sci_fl3slo_cdom_units",
            "m_fin",
            "x_cycle_overrun_in_ms",
            "sci_water_pressure",
            "u_hd_fin_ap_igain",
            "sci_fl3slo_phyco_units",
            "m_battpos",
            "sci_bb3slo_b470_scaled",
            "m_lat",
            "m_gps_lon",
            "sci_ctd41cp_timestamp",
            "m_pressure",
            "c_wpt_x_lmc",
            "c_ballast_pumped",
            "x_lmc_xy_source",
            "m_lon",
            "m_avg_speed",
            "sci_water_temp",
            "u_pitch_ap_gain",
            "m_roll",
            "m_tot_num_inflections",
            "m_x_lmc",
            "u_pitch_ap_deadband",
            "m_final_water_vy",
            "m_final_water_vx",
            "m_water_depth",
            "m_leakdetect_voltage",
            "u_pitch_max_delta_battpos",
            "m_coulomb_amphr",
            "m_pitch",
        ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name="slocum_model")
        dsrc_model.model = "SLOCUM"
        dsrc_model.data_handler_module = "N/A"
        dsrc_model.data_handler_class = "N/A"

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Associate the resources using the individual assignment methods
        dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id
        )
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # create temp streamdef so the data product can create the stream
        streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")

        # Generate the data product and associate it to the ExternalDataset

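        # Note: CoverageCraft is used at class level here (no instance is constructed);
        # create_domains/create_parameters are invoked directly on the class.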
        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dprod = IonObject(
            RT.DataProduct,
            name="slocum_parsed_product",
            description="parsed slocum product",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )

        dproduct_id = dpms_cli.create_data_product(
            data_product=dprod, stream_definition_id=streamdef_id, parameter_dictionary=parameter_dictionary
        )

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(
            subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True
        )
        stream_id = stream_id[0]

        log.info(
            "Created resources: {0}".format(
                {
                    "ExternalDataset": ds_id,
                    "ExternalDataProvider": ext_dprov_id,
                    "DataSource": ext_dsrc_id,
                    "DataSourceModel": ext_dsrc_model_id,
                    "DataProducer": dproducer_id,
                    "DataProduct": dproduct_id,
                    "Stream": stream_id,
                }
            )
        )

        # CBM: Use CF standard_names

        #        ttool = TaxyTool()
        #
        #        ttool.add_taxonomy_set('c_wpt_y_lmc'),
        #        ttool.add_taxonomy_set('sci_water_cond'),
        #        ttool.add_taxonomy_set('m_y_lmc'),
        #        ttool.add_taxonomy_set('u_hd_fin_ap_inflection_holdoff'),
        #        ttool.add_taxonomy_set('sci_m_present_time'),
        #        ttool.add_taxonomy_set('m_leakdetect_voltage_forward'),
        #        ttool.add_taxonomy_set('sci_bb3slo_b660_scaled'),
        #        ttool.add_taxonomy_set('c_science_send_all'),
        #        ttool.add_taxonomy_set('m_gps_status'),
        #        ttool.add_taxonomy_set('m_water_vx'),
        #        ttool.add_taxonomy_set('m_water_vy'),
        #        ttool.add_taxonomy_set('c_heading'),
        #        ttool.add_taxonomy_set('sci_fl3slo_chlor_units'),
        #        ttool.add_taxonomy_set('u_hd_fin_ap_gain'),
        #        ttool.add_taxonomy_set('m_vacuum'),
        #        ttool.add_taxonomy_set('u_min_water_depth'),
        #        ttool.add_taxonomy_set('m_gps_lat'),
        #        ttool.add_taxonomy_set('m_veh_temp'),
        #        ttool.add_taxonomy_set('f_fin_offset'),
        #        ttool.add_taxonomy_set('u_hd_fin_ap_hardover_holdoff'),
        #        ttool.add_taxonomy_set('c_alt_time'),
        #        ttool.add_taxonomy_set('m_present_time'),
        #        ttool.add_taxonomy_set('m_heading'),
        #        ttool.add_taxonomy_set('sci_bb3slo_b532_scaled'),
        #        ttool.add_taxonomy_set('sci_fl3slo_cdom_units'),
        #        ttool.add_taxonomy_set('m_fin'),
        #        ttool.add_taxonomy_set('x_cycle_overrun_in_ms'),
        #        ttool.add_taxonomy_set('sci_water_pressure'),
        #        ttool.add_taxonomy_set('u_hd_fin_ap_igain'),
        #        ttool.add_taxonomy_set('sci_fl3slo_phyco_units'),
        #        ttool.add_taxonomy_set('m_battpos'),
        #        ttool.add_taxonomy_set('sci_bb3slo_b470_scaled'),
        #        ttool.add_taxonomy_set('m_lat'),
        #        ttool.add_taxonomy_set('m_gps_lon'),
        #        ttool.add_taxonomy_set('sci_ctd41cp_timestamp'),
        #        ttool.add_taxonomy_set('m_pressure'),
        #        ttool.add_taxonomy_set('c_wpt_x_lmc'),
        #        ttool.add_taxonomy_set('c_ballast_pumped'),
        #        ttool.add_taxonomy_set('x_lmc_xy_source'),
        #        ttool.add_taxonomy_set('m_lon'),
        #        ttool.add_taxonomy_set('m_avg_speed'),
        #        ttool.add_taxonomy_set('sci_water_temp'),
        #        ttool.add_taxonomy_set('u_pitch_ap_gain'),
        #        ttool.add_taxonomy_set('m_roll'),
        #        ttool.add_taxonomy_set('m_tot_num_inflections'),
        #        ttool.add_taxonomy_set('m_x_lmc'),
        #        ttool.add_taxonomy_set('u_pitch_ap_deadband'),
        #        ttool.add_taxonomy_set('m_final_water_vy'),
        #        ttool.add_taxonomy_set('m_final_water_vx'),
        #        ttool.add_taxonomy_set('m_water_depth'),
        #        ttool.add_taxonomy_set('m_leakdetect_voltage'),
        #        ttool.add_taxonomy_set('u_pitch_max_delta_battpos'),
        #        ttool.add_taxonomy_set('m_coulomb_amphr'),
        #        ttool.add_taxonomy_set('m_pitch'),

        # Every variable gets an identical placeholder context (float32 encoding,
        # unknown uom), so build the ParameterDictionary in a loop over the
        # variable list above instead of repeating the same three lines per name.
        pdict = ParameterDictionary()

        for var_name in dset.dataset_description.parameters["variables"]:
            t_ctxt = ParameterContext(var_name, param_type=QuantityType(value_encoding=numpy.dtype("float32")))
            t_ctxt.uom = "unknown"
            pdict.add_context(t_ctxt)

        # CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        self.create_stream_and_logger(name="slocum", stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG["dh_cfg"] = {
            "TESTING": True,
            "stream_id": stream_id,
            "external_dataset_res": dset,
            "param_dictionary": pdict.dump(),
            "data_producer_id": dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            "max_records": 20,
        }


class TestExternalDatasetAgentMgmt(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        log.debug("TestExternalDatasetAgentMgmt: started services")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)

    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datasetModel_obj = IonObject(RT.ExternalDatasetModel, name='ExampleDatasetModel', description="ExampleDatasetModel", datset_type="FibSeries")
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(datasetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id) )

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(RT.ExternalDatasetAgent, name='datasetagent007', description="datasetagent007", handler_module="ion.agents.eoi.external_dataset_agent", handler_class="ExternalDatasetAgent" )
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id) )


        # Create ExternalDataset
        log.debug('TestExternalDatasetAgentMgmt: Create external dataset resource ')
        extDataset_obj = IonObject(RT.ExternalDataset, name='ExtDataset', description="ExtDataset" )
        try:
            extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" %ex)

        log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ", str(extDataset_id))

        #register the dataset as a data producer
        self.damsclient.register_external_data_set(extDataset_id)


        # Create agent config.
        self._stream_config = {}
        agent_config = {
            'driver_config' : DVR_CONFIG,
            'stream_config' : self._stream_config,
            'agent'         : {'resource_id': EDA_RESOURCE_ID},
            'test_mode' : True
        }
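        # DVR_CONFIG and EDA_RESOURCE_ID above are assumed to be module-level constants
        # defined elsewhere in this test module; they are not created in this snippet.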

        extDatasetAgentInstance_obj = IonObject(RT.ExternalDatasetAgentInstance, name='DatasetAgentInstance', description="DatasetAgentInstance", dataset_driver_config = DVR_CONFIG, dataset_agent_config = agent_config)
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(external_dataset_agent_instance=extDatasetAgentInstance_obj, external_dataset_agent_id=datasetAgent_id, external_dataset_id=extDataset_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj) )
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id) )
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def)

        log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id))

        log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition")
        dp_obj = IonObject(RT.DataProduct, name='eoi dataset data', description='stream test')
        try:
            data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1) )

        self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1)

        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1, persist_data=True, persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the data product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids) )


        self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id)


        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj) )

        # Start a resource agent client to talk with the dataset agent.
        self._dsa_client = ResourceAgentClient(extDataset_id, process=FakeProcess())
        print 'test_activateDatasetAgent: got dataset agent client %s' % self._dsa_client
        log.debug("test_activateDatasetAgent: got dataset agent client %s", str(self._dsa_client))

        cmd=AgentCommand(command='initialize')
        _ = self._dsa_client.execute_agent(cmd)

        cmd = AgentCommand(command='go_active')
        _ = self._dsa_client.execute_agent(cmd)

        cmd = AgentCommand(command='run')
        _ = self._dsa_client.execute_agent(cmd)

        log.info('Send an unconstrained request for data (\'new data\')')
        config={'stream_id':'first_new','TESTING':True}
        cmd = AgentCommand(command='acquire_data', args=[config])
        self._dsa_client.execute(cmd)

        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        config={'stream_id':'second_new','TESTING':True}
        cmd = AgentCommand(command='acquire_data', args=[config])
        self._dsa_client.execute(cmd)

        cmd = AgentCommand(command='reset')
        _ = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)


class TestExternalDatasetAgentMgmt(IonIntegrationTestCase):

    # DataHandler config
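    # 'dvr_mod'/'dvr_cls' name the module and class of the DataHandler the
    # dataset agent loads as its driver; DummyDataHandler is a test stand-in.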
    DVR_CONFIG = {
        'dvr_mod' : 'ion.agents.data.handlers.base_data_handler',
        'dvr_cls' : 'DummyDataHandler',
    }

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        log.debug("TestExternalDatasetAgentMgmt: started services")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)

    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        # note: 'datset_type' (not a typo here) is the field name used by the resource definition
        datasetModel_obj = IonObject(RT.ExternalDatasetModel, name='ExampleDatasetModel', description="ExampleDatasetModel", datset_type="FibSeries")
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(datasetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" % ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id))

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(RT.ExternalDatasetAgent, name='datasetagent007', description="datasetagent007", handler_module=EDA_MOD, handler_class=EDA_CLS )
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id) )


        # Create ExternalDataset
        log.debug('TestExternalDatasetAgentMgmt: Create external dataset resource ')
        extDataset_obj = IonObject(RT.ExternalDataset, name='ExtDataset', description="ExtDataset" )
        try:
            extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" %ex)

        log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ", str(extDataset_id))

        #register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(extDataset_id)

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def)

        log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id))

        log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition")
        dp_obj = IonObject(RT.DataProduct,name='eoi dataset data',description=' stream test')
        try:
            data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1) )

        self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1)

        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1, persist_data=True, persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the data product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids) )
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set('data', 'external_data')

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,  # TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
            'data_producer_id':dproducer_id,
#            'external_dataset_res':extDataset_obj,  # Not needed - retrieved by EDA based on resource_id
            'taxonomy':tx.dump(),  # TODO: Currently does not support sets
            'max_records':4,
            }

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            'driver_config' : self.DVR_CONFIG,
            'stream_config' : self._stream_config,
            'agent'         : {'resource_id': EDA_RESOURCE_ID},
            'test_mode' : True
        }

        extDatasetAgentInstance_obj = IonObject(RT.ExternalDatasetAgentInstance, name='DatasetAgentInstance', description="DatasetAgentInstance", dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=agent_config)
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(external_dataset_agent_instance=extDatasetAgentInstance_obj, external_dataset_agent_id=datasetAgent_id, external_dataset_id=extDataset_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj) )
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id) )

        # Check that the instance is currently not active
        inst_id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ", str(inst_id), str(active))

        self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id)


        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj) )

        # now the instance process should be active
        inst_id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ", str(inst_id), str(active))

        # Start a resource agent client to talk with the dataset agent.
        self._dsa_client = ResourceAgentClient(extDataset_id, process=FakeProcess())
        print 'TestExternalDatasetAgentMgmt: got dataset agent client %s' % self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset agent client %s", str(self._dsa_client))

#        cmd=AgentCommand(command='initialize')
#        _ = self._dsa_client.execute_agent(cmd)
#
#        cmd = AgentCommand(command='go_active')
#        _ = self._dsa_client.execute_agent(cmd)
#
#        cmd = AgentCommand(command='run')
#        _ = self._dsa_client.execute_agent(cmd)
#
#        log.info('Send an unconstrained request for data (\'new data\')')
#        cmd = AgentCommand(command='acquire_data')
#        self._dsa_client.execute(cmd)
#
#        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
#        cmd = AgentCommand(command='acquire_data')
#        self._dsa_client.execute(cmd)
#
#        cmd = AgentCommand(command='reset')
#        _ = self._dsa_client.execute_agent(cmd)
#        cmd = AgentCommand(command='get_current_state')
#        retval = self._dsa_client.execute_agent(cmd)
#        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='resume')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='reset')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)




        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)
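

# A minimal helper sketch (not part of the original tests): the repeated
# execute-command-then-verify-state pattern in test_activateDatasetAgent could
# be factored out as below. 'client' is assumed to be a ResourceAgentClient and
# 'expected_state' an InstrumentAgentState value.
def _assert_state_transition(client, command, expected_state):
    # Issue the agent command, then read back and verify the FSM state.
    client.execute_agent(AgentCommand(command=command))
    retval = client.execute_agent(AgentCommand(command='get_current_state'))
    assert retval.result == expected_state, \
        'expected %s, got %s' % (expected_state, retval.result)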
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.name = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'slocum_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        dset.dataset_description.parameters['dataset_path'] = 'test_data/ru05-2012-021-0-0-sbd.dat'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
            'c_wpt_y_lmc',
            'sci_water_cond',
            'm_y_lmc',
            'u_hd_fin_ap_inflection_holdoff',
            'sci_m_present_time',
            'm_leakdetect_voltage_forward',
            'sci_bb3slo_b660_scaled',
            'c_science_send_all',
            'm_gps_status',
            'm_water_vx',
            'm_water_vy',
            'c_heading',
            'sci_fl3slo_chlor_units',
            'u_hd_fin_ap_gain',
            'm_vacuum',
            'u_min_water_depth',
            'm_gps_lat',
            'm_veh_temp',
            'f_fin_offset',
            'u_hd_fin_ap_hardover_holdoff',
            'c_alt_time',
            'm_present_time',
            'm_heading',
            'sci_bb3slo_b532_scaled',
            'sci_fl3slo_cdom_units',
            'm_fin',
            'x_cycle_overrun_in_ms',
            'sci_water_pressure',
            'u_hd_fin_ap_igain',
            'sci_fl3slo_phyco_units',
            'm_battpos',
            'sci_bb3slo_b470_scaled',
            'm_lat',
            'm_gps_lon',
            'sci_ctd41cp_timestamp',
            'm_pressure',
            'c_wpt_x_lmc',
            'c_ballast_pumped',
            'x_lmc_xy_source',
            'm_lon',
            'm_avg_speed',
            'sci_water_temp',
            'u_pitch_ap_gain',
            'm_roll',
            'm_tot_num_inflections',
            'm_x_lmc',
            'u_pitch_ap_deadband',
            'm_final_water_vy',
            'm_final_water_vx',
            'm_water_depth',
            'm_leakdetect_voltage',
            'u_pitch_max_delta_battpos',
            'm_coulomb_amphr',
            'm_pitch',
        ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='slocum_model')
        dsrc_model.model = 'SLOCUM'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Associate everything, using each assignment method individually
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='slocum_parsed_product', description='parsed slocum product')
        dproduct_id = dpms_cli.create_data_product(data_product=dprod)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset':ds_id, 'ExternalDataProvider':ext_dprov_id, 'DataSource':ext_dsrc_id, 'DataSourceModel':ext_dsrc_model_id, 'DataProducer':dproducer_id, 'DataProduct':dproduct_id, 'Stream':stream_id}))

        #CBM: Use CF standard_names

        ttool = TaxyTool()

        ttool.add_taxonomy_set('c_wpt_y_lmc')
        ttool.add_taxonomy_set('sci_water_cond')
        ttool.add_taxonomy_set('m_y_lmc')
        ttool.add_taxonomy_set('u_hd_fin_ap_inflection_holdoff')
        ttool.add_taxonomy_set('sci_m_present_time')
        ttool.add_taxonomy_set('m_leakdetect_voltage_forward')
        ttool.add_taxonomy_set('sci_bb3slo_b660_scaled')
        ttool.add_taxonomy_set('c_science_send_all')
        ttool.add_taxonomy_set('m_gps_status')
        ttool.add_taxonomy_set('m_water_vx')
        ttool.add_taxonomy_set('m_water_vy')
        ttool.add_taxonomy_set('c_heading')
        ttool.add_taxonomy_set('sci_fl3slo_chlor_units')
        ttool.add_taxonomy_set('u_hd_fin_ap_gain')
        ttool.add_taxonomy_set('m_vacuum')
        ttool.add_taxonomy_set('u_min_water_depth')
        ttool.add_taxonomy_set('m_gps_lat')
        ttool.add_taxonomy_set('m_veh_temp')
        ttool.add_taxonomy_set('f_fin_offset')
        ttool.add_taxonomy_set('u_hd_fin_ap_hardover_holdoff')
        ttool.add_taxonomy_set('c_alt_time')
        ttool.add_taxonomy_set('m_present_time')
        ttool.add_taxonomy_set('m_heading')
        ttool.add_taxonomy_set('sci_bb3slo_b532_scaled')
        ttool.add_taxonomy_set('sci_fl3slo_cdom_units')
        ttool.add_taxonomy_set('m_fin')
        ttool.add_taxonomy_set('x_cycle_overrun_in_ms')
        ttool.add_taxonomy_set('sci_water_pressure')
        ttool.add_taxonomy_set('u_hd_fin_ap_igain')
        ttool.add_taxonomy_set('sci_fl3slo_phyco_units')
        ttool.add_taxonomy_set('m_battpos')
        ttool.add_taxonomy_set('sci_bb3slo_b470_scaled')
        ttool.add_taxonomy_set('m_lat')
        ttool.add_taxonomy_set('m_gps_lon')
        ttool.add_taxonomy_set('sci_ctd41cp_timestamp')
        ttool.add_taxonomy_set('m_pressure')
        ttool.add_taxonomy_set('c_wpt_x_lmc')
        ttool.add_taxonomy_set('c_ballast_pumped')
        ttool.add_taxonomy_set('x_lmc_xy_source')
        ttool.add_taxonomy_set('m_lon')
        ttool.add_taxonomy_set('m_avg_speed')
        ttool.add_taxonomy_set('sci_water_temp')
        ttool.add_taxonomy_set('u_pitch_ap_gain')
        ttool.add_taxonomy_set('m_roll')
        ttool.add_taxonomy_set('m_tot_num_inflections')
        ttool.add_taxonomy_set('m_x_lmc')
        ttool.add_taxonomy_set('u_pitch_ap_deadband')
        ttool.add_taxonomy_set('m_final_water_vy')
        ttool.add_taxonomy_set('m_final_water_vx')
        ttool.add_taxonomy_set('m_water_depth')
        ttool.add_taxonomy_set('m_leakdetect_voltage')
        ttool.add_taxonomy_set('u_pitch_max_delta_battpos')
        ttool.add_taxonomy_set('m_coulomb_amphr')
        ttool.add_taxonomy_set('m_pitch')
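
        # Sketch (not in the original): the add_taxonomy_set calls above mirror the
        # dataset's variable list one-for-one, so they could equivalently be driven by:
        #
        #     for var_name in dset.dataset_description.parameters['variables']:
        #         ttool.add_taxonomy_set(var_name)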

        #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        self.create_stream_and_logger(name='slocum',stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,
            'external_dataset_res':dset,
            'taxonomy':ttool.dump(),
            'data_producer_id':dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records':20,
        }
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.name = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'ruv_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        dset.dataset_description.parameters['base_url'] = 'test_data/ruv/'
        dset.dataset_description.parameters['list_pattern'] = 'RDLi_SEAB_2011_08_24_1600.ruv'
        dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H %M'
        dset.dataset_description.parameters['date_extraction_pattern'] = 'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
        ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='ruv_model')
        dsrc_model.model = 'RUV'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Associate everything, using each assignment method individually
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # create a temporary stream definition so the data product can create the stream

        craft = CoverageCraft  # note: create_domains/create_parameters are invoked on the class itself
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dprod = IonObject(RT.DataProduct,
            name='ruv_parsed_product',
            description='parsed ruv product',
            temporal_domain = tdom,
            spatial_domain = sdom)

        streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
                                                    stream_definition_id=streamdef_id,
                                                    parameter_dictionary=parameter_dictionary)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset':ds_id, 'ExternalDataProvider':ext_dprov_id, 'DataSource':ext_dsrc_id, 'DataSourceModel':ext_dsrc_model_id, 'DataProducer':dproducer_id, 'DataProduct':dproduct_id, 'Stream':stream_id}))

        #CBM: Use CF standard_names

        #ttool = TaxyTool()
        #
        #ttool.add_taxonomy_set('data','test data')
        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)
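
        # Sketch (illustrative; the name, dtype, and uom below are assumptions,
        # not from the original): additional parameters would be registered the
        # same way:
        #
        #     v_ctxt = ParameterContext('value', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        #     v_ctxt.uom = 'unknown'
        #     pdict.add_context(v_ctxt)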

        #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        self.create_stream_and_logger(name='ruv',stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,
            'external_dataset_res':dset,
            'param_dictionary':pdict.dump(),
            'data_producer_id':dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records':20,
        }
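
        # Note: unlike the slocum setup above, this handler config carries a
        # 'param_dictionary' (ParameterDictionary.dump()) rather than a
        # 'taxonomy' (TaxyTool.dump()).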


class TestIntExternalObservatoryAgentService(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml')

        #        self.eoas_cli = ExternalObservatoryAgentServiceClient()
        #        self.rr_cli = ResourceRegistryServiceClient()
        self.dams_cli = DataAcquisitionManagementServiceClient()
        self.dpms_cli = DataProductManagementServiceClient()

        self._setup_ncom()
        self._setup_hfr()

        #        eoas_proc = self.container.proc_manager.procs_by_name['external_data_agent_management']
        #        log.debug("Got EOAS Process: %s" % eoas_proc)
        self._ncom_agt_cli = ResourceAgentClient(
            resource_id=self.ncom_ds_id,
            name='external_observatory_agent',
            process=FakeProcess())
        log.debug("Got a ResourceAgentClient: res_id=%s" %
                  self._ncom_agt_cli.resource_id)

        self._hfr_agt_cli = ResourceAgentClient(
            resource_id=self.hfr_ds_id,
            name='external_observatory_agent',
            process=FakeProcess())
        log.debug("Got a ResourceAgentClient: res_id=%s" %
                  self._hfr_agt_cli.resource_id)

    def _setup_ncom(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Create and register the necessary resources/objects

        eda = ExternalDatasetAgent()
        eda_id = self.dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(
            eda_inst, external_dataset_agent_id=eda_id)

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(),
                                     contact=ContactInformation())
        #        dprov.institution.name = "OOI CGSN"
        dprov.contact.name = "Robert Weller"
        dprov.contact.email = "*****@*****.**"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP",
                          institution=Institution(),
                          contact=ContactInformation())
        #        dsrc.connection_params["base_data_url"] = "http://ooi.whoi.edu/thredds/dodsC/"
        dsrc.connection_params["base_data_url"] = ""
        dsrc.contact.name = "Rich Signell"
        dsrc.contact.email = "*****@*****.**"

        # Create ExternalDataset
        dset = ExternalDataset(name="test",
                               dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(),
                               contact=ContactInformation())

        #        dset.dataset_description.parameters["dataset_path"] = "ooi/AS02CPSM_R_M.nc"
        dset.dataset_description.parameters[
            "dataset_path"] = "test_data/ncom.nc"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        ds_id = self.ncom_ds_id = self.dams_cli.create_external_dataset(
            external_dataset=dset)
        ext_dprov_id = self.dams_cli.create_external_data_provider(
            external_data_provider=dprov)
        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = self.dams_cli.register_external_data_set(
            external_dataset_id=ds_id)

        ## Associate everything
        # Convenience method
        #        self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

        # Or using each method
        self.dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        self.dams_cli.assign_data_source_to_data_model(
            data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        self.dams_cli.assign_external_dataset_to_data_source(
            external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        self.dams_cli.assign_external_dataset_to_agent_instance(
            external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        self.dams_cli.assign_external_dataset_agent_to_data_model(external_data_agent_id=eda_id, data_source_model_id=ext_dsrc_model_id)
        #        self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='ncom_product',
                            description='raw ncom product')
        dproduct_id = self.dpms_cli.create_data_product(data_product=dprod)

        self.dams_cli.assign_data_product(input_resource_id=ds_id,
                                          data_product_id=dproduct_id,
                                          create_stream=True)

    def _setup_hfr(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Create and register the necessary resources/objects

        eda = ExternalDatasetAgent()
        eda_id = self.dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(
            eda_inst, external_dataset_agent_id=eda_id)

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(),
                                     contact=ContactInformation())
        #        dprov.institution.name = "HFR UCSD"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP",
                          institution=Institution(),
                          contact=ContactInformation())
        dsrc.connection_params[
            "base_data_url"] = "http://hfrnet.ucsd.edu:8080/thredds/dodsC/"

        # Create ExternalDataset
        dset = ExternalDataset(name="UCSD HFR",
                               dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(),
                               contact=ContactInformation())
        dset.dataset_description.parameters[
            "dataset_path"] = "HFRNet/USEGC/6km/hourly/RTV"
        #        dset.dataset_description.parameters["dataset_path"] = "test_data/hfr.nc"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        ds_id = self.hfr_ds_id = self.dams_cli.create_external_dataset(
            external_dataset=dset)
        ext_dprov_id = self.dams_cli.create_external_data_provider(
            external_data_provider=dprov)
        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = self.dams_cli.register_external_data_set(
            external_dataset_id=ds_id)

        ## Associate everything
        # Convenience method
        #        self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

        # Or using each method
        self.dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        self.dams_cli.assign_data_source_to_data_model(
            data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        self.dams_cli.assign_external_dataset_to_data_source(
            external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        self.dams_cli.assign_external_dataset_to_agent_instance(
            external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        self.dams_cli.assign_external_dataset_agent_to_data_model(external_data_agent_id=eda_id, data_source_model_id=ext_dsrc_model_id)
        #        self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='hfr_product', description='raw hfr product')
        dproduct_id = self.dpms_cli.create_data_product(data_product=dprod)

        self.dams_cli.assign_data_product(input_resource_id=ds_id,
                                          data_product_id=dproduct_id,
                                          create_stream=True)

########## Tests ##########

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_get_capabilities(self):
        # Get all the capabilities
        caps = self._ncom_agt_cli.get_capabilities()
        log.debug("all capabilities: %s" % caps)
        lst = [['RES_CMD', 'acquire_data'],
               ['RES_CMD', 'acquire_data_by_request'],
               ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'],
               ['RES_CMD', 'compare'], ['RES_CMD', 'get_attributes'],
               ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'],
               ['RES_CMD', 'has_new_data']]
        self.assertEquals(caps, lst)

        caps = self._ncom_agt_cli.get_capabilities(
            capability_types=['RES_CMD'])
        log.debug("resource commands: %s" % caps)
        lst = [['RES_CMD', 'acquire_data'],
               ['RES_CMD', 'acquire_data_by_request'],
               ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'],
               ['RES_CMD', 'compare'], ['RES_CMD', 'get_attributes'],
               ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'],
               ['RES_CMD', 'has_new_data']]
        self.assertEquals(caps, lst)

        caps = self._ncom_agt_cli.get_capabilities(
            capability_types=['RES_PAR'])
        log.debug("resource commands: %s" % caps)
        self.assertEqual(type(caps), list)

        caps = self._ncom_agt_cli.get_capabilities(
            capability_types=['AGT_CMD'])
        log.debug("resource commands: %s" % caps)
        self.assertEqual(type(caps), list)

        caps = self._ncom_agt_cli.get_capabilities(
            capability_types=['AGT_PAR'])
        log.debug("resource commands: %s" % caps)
        self.assertEqual(type(caps), list)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_get_attrs(self):
        cmd = AgentCommand(command_id="111", command="get_attributes")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertTrue(isinstance(ret.result, dict))

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_get_fingerprint(self):
        cmd = AgentCommand(command_id="111", command="get_fingerprint")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertTrue(isinstance(ret.result, dict))

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_single_worker(self):
        cmd = AgentCommand(command_id="111", command="get_attributes")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertTrue(isinstance(ret.result, dict))

        cmd = AgentCommand(command_id="112", command="get_fingerprint")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertTrue(isinstance(ret.result, dict))

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_multi_worker(self):
        cmd = AgentCommand(command_id="111", command="has_new_data")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertTrue(isinstance(ret.result, dict))

        cmd = AgentCommand(command_id="111", command="has_new_data")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._hfr_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertTrue(isinstance(ret.result, dict))

        cmd = AgentCommand(command_id="112", command="get_fingerprint")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertTrue(isinstance(ret.result, dict))

        cmd = AgentCommand(command_id="112", command="get_fingerprint")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._hfr_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)
        self.assertTrue(isinstance(ret.result, dict))

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_acquire_data(self):
        cmd = AgentCommand(command_id="113", command="acquire_data")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_acquire_new_data(self):
        cmd = AgentCommand(command_id="113", command="acquire_new_data")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, 0)

    @unittest.skip("Underlying method not yet implemented")
    def test_set_param(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.set_param(resource_id=res_id,
                                    name="param",
                                    value="value")

    @unittest.skip("Underlying method not yet implemented")
    def test_get_param(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.get_param(resource_id=res_id, name="param")

    @unittest.skip("Underlying method not yet implemented")
    def test_execute_agent(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.execute_agent(resource_id=res_id)

    @unittest.skip("Underlying method not yet implemented")
    def test_set_agent_param(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.set_agent_param(resource_id=res_id,
                                          name="param",
                                          value="value")

    @unittest.skip("Underlying method not yet implemented")
    def test_get_agent_param(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.get_agent_param(resource_id=res_id, name="param")


class TestIntExternalObservatoryAgentService(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml')

#        self.eoas_cli = ExternalObservatoryAgentServiceClient()
#        self.rr_cli = ResourceRegistryServiceClient()
        self.dams_cli = DataAcquisitionManagementServiceClient()
        self.dpms_cli = DataProductManagementServiceClient()

        self._setup_ncom()
        self._setup_hfr()

#        eoas_proc = self.container.proc_manager.procs_by_name['external_data_agent_management']
#        log.debug("Got EOAS Process: %s" % eoas_proc)
        self._ncom_agt_cli = ResourceAgentClient(resource_id=self.ncom_ds_id, name='external_observatory_agent', process=FakeProcess())
        log.debug("Got a ResourceAgentClient: res_id=%s" % self._ncom_agt_cli.resource_id)

        self._hfr_agt_cli = ResourceAgentClient(resource_id=self.hfr_ds_id, name='external_observatory_agent', process=FakeProcess())
        log.debug("Got a ResourceAgentClient: res_id=%s" % self._hfr_agt_cli.resource_id)

    def _setup_ncom(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Create and register the necessary resources/objects

        eda = ExternalDatasetAgent()
        eda_id = self.dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)


        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
#        dprov.institution.name = "OOI CGSN"
        dprov.contact.name = "Robert Weller"
        dprov.contact.email = "*****@*****.**"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
        #        dsrc.connection_params["base_data_url"] = "http://ooi.whoi.edu/thredds/dodsC/"
        dsrc.connection_params["base_data_url"] = ""
        dsrc.contact.name="Rich Signell"
        dsrc.contact.email = "*****@*****.**"

        # Create ExternalDataset
        dset = ExternalDataset(name="test", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        #        dset.dataset_description.parameters["dataset_path"] = "ooi/AS02CPSM_R_M.nc"
        dset.dataset_description.parameters["dataset_path"] = "test_data/ncom.nc"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        ds_id = self.ncom_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = self.dams_cli.register_external_data_set(external_dataset_id=ds_id)

        ## Associate everything
        # Convenience method
#        self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)


        # Or using each method
        self.dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        self.dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        self.dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        self.dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
#        self.dams_cli.assign_external_dataset_agent_to_data_model(external_data_agent_id=eda_id, data_source_model_id=ext_dsrc_model_id)
#        self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='ncom_product', description='raw ncom product')
        dproduct_id = self.dpms_cli.create_data_product(data_product=dprod)

        self.dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

    def _setup_hfr(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Create and register the necessary resources/objects

        eda = ExternalDatasetAgent()
        eda_id = self.dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = self.dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
#        dprov.institution.name = "HFR UCSD"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
        dsrc.connection_params["base_data_url"] = "http://hfrnet.ucsd.edu:8080/thredds/dodsC/"

        # Create ExternalDataset
        dset = ExternalDataset(name="UCSD HFR", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
        dset.dataset_description.parameters["dataset_path"] = "HFRNet/USEGC/6km/hourly/RTV"
#        dset.dataset_description.parameters["dataset_path"] = "test_data/hfr.nc"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        ds_id = self.hfr_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = self.dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = self.dams_cli.register_external_data_set(external_dataset_id=ds_id)

        ## Associate everything
        # Convenience method
#        self.dams_cli.assign_eoi_resources(external_data_provider_id=ext_dprov_id, data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id, external_dataset_id=ds_id, external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

        # Or using each method
        self.dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        self.dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        self.dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        self.dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
#        self.dams_cli.assign_external_dataset_agent_to_data_model(external_data_agent_id=eda_id, data_source_model_id=ext_dsrc_model_id)
#        self.dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=eda_id, agent_instance_id=eda_inst_id)

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='hfr_product', description='raw hfr product')
        dproduct_id = self.dpms_cli.create_data_product(data_product=dprod)

        self.dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)



########## Tests ##########

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_get_capabilities(self):
        # Get all the capabilities
        caps = self._ncom_agt_cli.get_capabilities()
        log.debug("all capabilities: %s" % caps)
        lst=[['RES_CMD', 'acquire_data'], ['RES_CMD', 'acquire_data_by_request'],
            ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'], ['RES_CMD', 'compare'],
            ['RES_CMD', 'get_attributes'], ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'],
            ['RES_CMD', 'has_new_data']]
        self.assertEquals(caps, lst)

        caps = self._ncom_agt_cli.get_capabilities(capability_types=['RES_CMD'])
        log.debug("resource commands: %s" % caps)
        lst=[['RES_CMD', 'acquire_data'], ['RES_CMD', 'acquire_data_by_request'],
            ['RES_CMD', 'acquire_new_data'], ['RES_CMD', 'close'], ['RES_CMD', 'compare'],
            ['RES_CMD', 'get_attributes'], ['RES_CMD', 'get_fingerprint'], ['RES_CMD', 'get_status'],
            ['RES_CMD', 'has_new_data']]
        self.assertEqual(caps, lst)

        caps = self._ncom_agt_cli.get_capabilities(capability_types=['RES_PAR'])
        log.debug("resource parameters: %s" % caps)
        self.assertIsInstance(caps, list)

        caps = self._ncom_agt_cli.get_capabilities(capability_types=['AGT_CMD'])
        log.debug("agent commands: %s" % caps)
        self.assertIsInstance(caps, list)

        caps = self._ncom_agt_cli.get_capabilities(capability_types=['AGT_PAR'])
        log.debug("agent parameters: %s" % caps)
        self.assertIsInstance(caps, list)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_get_attrs(self):
        cmd = AgentCommand(command_id="111", command="get_attributes")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_get_fingerprint(self):
        cmd = AgentCommand(command_id="111", command="get_fingerprint")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_single_worker(self):
        cmd = AgentCommand(command_id="111", command="get_attributes")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

        cmd = AgentCommand(command_id="112", command="get_fingerprint")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_multi_worker(self):
        cmd = AgentCommand(command_id="111", command="has_new_data")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

        cmd = AgentCommand(command_id="111", command="has_new_data")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._hfr_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

        cmd = AgentCommand(command_id="112", command="get_fingerprint")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

        cmd = AgentCommand(command_id="112", command="get_fingerprint")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._hfr_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)
        self.assertIsInstance(ret.result, dict)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_acquire_data(self):
        cmd = AgentCommand(command_id="113", command="acquire_data")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)

    @unittest.skip("Currently broken due to resource/agent refactorings")
    def test_execute_acquire_new_data(self):
        cmd = AgentCommand(command_id="113", command="acquire_new_data")
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self._ncom_agt_cli.execute(cmd)
        log.debug("Returned: %s" % ret)
        self.assertEqual(ret.status, 0)

    @unittest.skip("Underlying method not yet implemented")
    def test_set_param(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.set_param(resource_id=res_id, name="param", value="value")

    @unittest.skip("Underlying method not yet implemented")
    def test_get_param(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.get_param(resource_id=res_id, name="param")

    @unittest.skip("Underlying method not yet implemented")
    def test_execute_agent(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.execute_agent(resource_id=res_id)

    @unittest.skip("Underlying method not yet implemented")
    def test_set_agent_param(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.set_agent_param(resource_id=res_id, name="param", value="value")

    @unittest.skip("Underlying method not yet implemented")
    def test_get_agent_param(self):
        res_id = self.ncom_ds_id

        log.debug("test_get_worker with res_id: %s" % res_id)
        res = self.eoas_cli.get_worker(res_id)

        with self.assertRaises(IonException):
            self.eoas_cli.get_agent_param(resource_id=res_id, name="param")
class TestIntDataAcquisitionManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)

    def tearDown(self):
        pass


    #@unittest.skip('Not done yet.')
    def test_data_source_ops(self):
        # test creating a new data source
        print 'Creating new data source'
        datasource_obj = IonObject(RT.DataSource,
                           name='DataSource1',
                           description='instrument based new source' ,
                            data_source_type='sbe37')
        try:
            ds_id = self.client.create_data_source(datasource_obj)
        except BadRequest as ex:
            self.fail("failed to create new data source: %s" %ex)
        print 'new data source id = ', ds_id


        # test reading a non-existent data source
        print 'reading non-existent data source'
        try:
            dp_obj = self.client.read_data_source('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during read: %s" %dp_obj)

        # update a data source (tests read also)
        print 'Updating data source'
        # first get the existing data source object
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass

        # now tweak the object
        datasource_obj.description = 'the very first data source'
        # now write the dp back to the registry
        try:
            update_result = self.client.update_data_source(datasource_obj)
        except NotFound as ex:
            self.fail("existing data source was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data source update")
        #else:
        #    self.assertTrue(update_result == True)
        # now get the data source back to see if it was updated
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass
        self.assertTrue(datasource_obj.description == 'the very first data source')


        # now 'delete' the data source
        print "deleting data source"
        try:
            delete_result = self.client.force_delete_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during delete")
        #self.assertTrue(delete_result == True)
        # now try to get the deleted dp object
        try:
            dp_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data source was found during read")

        # now try to delete the already deleted data source object
        print "deleting non-existing data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during delete")


    def test_register_instrument(self):
        # set up initial instrument to register
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        self.base_register_instrument(instrument_id)

    def test_register_platform(self):
        # set up initial platform to register
        platform_obj = IonObject(RT.PlatformDevice, name='Plat1',description='a platform that is creating the data product')
        platform_id, rev = self.rrclient.create(platform_obj)
        # NOTE: only resource creation is verified here; registration itself is exercised by base_register_instrument()

    #@unittest.skip('Not done yet.')
    def base_register_instrument(self, instrument_id):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id


        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id)
            self.client.assign_data_product_source(dataproduct_id, instrument_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
        if not assocs:
            self.fail("failed to assign data product to data producer")

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id)
            self.client.unassign_data_product_source(dataproduct_id, instrument_id)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
        if assocs:
            self.fail("failed to unassign data product from data producer")

        # test UNregistering a new data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial external data set to register
        ext_dataset_obj = IonObject(RT.ExternalDataset, name='DataSet1',description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering a external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)





    #@unittest.skip('not ready')
    def test_eoi_resources(self):

            #
            # test creating a new data provider
            #
            print 'Creating new external_data_provider'
            dataprovider_obj = IonObject(RT.ExternalDataProvider,
                               name='ExtDataProvider1',
                               description='external data provider ')
            try:
                dataprovider_id = self.client.create_external_data_provider(dataprovider_obj)
            except BadRequest as ex:
                self.fail("failed to create new data provider: %s" %ex)
            print 'new data provider id = ', dataprovider_id

            #
            # test creating a new data source
            #
            print 'Creating new data source'
            datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='data source ',
                               data_source_type='DAP')
            try:
                datasource_id = self.client.create_data_source(datasource_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source: %s" %ex)
            print 'new data source id = ', datasource_id

            #
            # test creating a new data source model
            #
            print 'Creating new data source model'
            datamodel_obj = IonObject(RT.DataSourceModel,
                               name='DataSourceModel1',
                               description='data source model')
            try:
                datamodel_id = self.client.create_data_source_model(datamodel_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source model: %s" %ex)
            print 'new data source model id = ', datamodel_id


            #
            # test creating a new external data set
            #
            print 'Creating new external data set'
            dataset_obj = IonObject(RT.ExternalDataset,
                               name='ExternalDataSet1',
                               description='external data set ')
            try:
                extdataset_id = self.client.create_external_dataset(dataset_obj)
            except BadRequest as ex:
                self.fail("failed to create new external data set: %s" %ex)
            print 'new external data set id = ', extdataset_id


            #
            # test creating a new external dataset agent
            #
            print 'Creating new external dataset agent'
            datasetagent_obj = IonObject(RT.ExternalDatasetAgent,
                               name='ExternalDatasetAgent1',
                               description='external dataset agent')
            try:
                datasetagent_id = self.client.create_external_dataset_agent(datasetagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent: %s" %ex)
            print 'new external dataset agent id = ', datasetagent_id


            #
            # test creating a new data source agent
            #
            print 'Creating new data source agent'
            datasourceagent_obj = IonObject(RT.DataSourceAgent,
                               name='DataSourceAgent1',
                               description='DataSource agent')
            try:
                datasource_agent_id = self.client.create_data_source_agent(datasourceagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source agent: %s" %ex)
            print 'new data source agent id = ', datasource_agent_id





            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external dataset agent instance'
            datasetagentinstance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                               name='ExternalDatasetAgentInstance1',
                               description='external dataset agent instance ')
            try:
                datasetagentinstance_id = self.client.create_external_dataset_agent_instance(datasetagentinstance_obj, datasetagent_id)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent instance: %s" %ex)
            print 'new external dataset agent instance id = ', datasetagentinstance_id

            #
            # test creating a new data source agent instance
            #
            print 'Creating new data source agent instance'
            datasourceagentinstance_obj = IonObject(RT.DataSourceAgentInstance,
                               name='ExternalDataSourceAgentInstance1',
                               description='external DataSource agent instance')
            try:
                datasource_agent_instance_id = self.client.create_data_source_agent_instance(datasourceagentinstance_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source agent instance: %s" %ex)
            print 'new data source agent instance id = ', datasource_agent_instance_id

            #
            # test assign / unassign
            #

            self.client.unassign_data_source_from_external_data_provider(datasource_id, dataprovider_id)

            self.client.unassign_data_source_from_data_model(datasource_id, datamodel_id)

            self.client.unassign_external_dataset_from_data_source(extdataset_id, datasource_id)

            #
            # test read
            #

            try:
                dp_obj = self.client.read_external_data_provider(dataprovider_id)
            except NotFound as ex:
                self.fail("existing data provicer was not found during read")
            else:
                pass

            try:
                dp_obj = self.client.read_data_source(datasource_id)
            except NotFound as ex:
                self.fail("existing data source was not found during read")
            else:
                pass

            #
            # test delete
            #
            try:
                self.client.delete_external_data_provider(dataprovider_id)
                self.client.delete_data_source(datasource_id)
                self.client.delete_external_dataset(extdataset_id)
                self.client.delete_data_source_model(datamodel_id)
                self.client.delete_external_dataset_agent(datasetagent_id)
                self.client.delete_data_source_agent_instance(datasource_agent_instance_id)

                self.client.force_delete_external_data_provider(dataprovider_id)
                self.client.force_delete_data_source(datasource_id)
                self.client.force_delete_external_dataset(extdataset_id)
                self.client.force_delete_data_source_model(datamodel_id)
                self.client.force_delete_external_dataset_agent(datasetagent_id)
                self.client.force_delete_data_source_agent_instance(datasource_agent_instance_id)
            except NotFound as ex:
                self.fail("existing data product was not found during delete")


            # test reading a non-existent external data provider
            print 'reading non-existent external data provider'
            try:
                bad_obj = self.client.read_external_data_provider('some_fake_id')
            except NotFound as ex:
                pass
            else:
                self.fail("non-existing external data provider was found during read: %s" %bad_obj)


    def make_grt_parser(self):
        return self.client.create_parser(Parser(name='grt', description='', module='ion.util.parsers.global_range_test', method='grt_parser', config=None))
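    # the Parser resource created above is referenced (by id) from the QC attachments in test_qc_attachment below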


    @unittest.skip("Deprecated")
    def test_qc_attachment(self):
        instrument_device = InstrumentDevice(name='whatever')
        instrument_device_id,_ = self.rrclient.create(instrument_device)
        self.addCleanup(self.rrclient.delete, instrument_device_id)
        self.client.register_instrument(instrument_device_id)
        self.addCleanup(self.client.unregister_instrument, instrument_device_id)
        dp = DataProduct(name='instrument output')

        dp_id,_ = self.rrclient.create(dp)
        self.addCleanup(self.rrclient.delete, dp_id)

        parser_id = self.make_grt_parser()
        attachment = Attachment(name='qc ref', attachment_type=AttachmentType.REFERENCE,content=global_range_test_document, context=ReferenceAttachmentContext(parser_id=parser_id))
        att_id = self.rrclient.create_attachment(dp_id, attachment)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        attachment2 = Attachment(name='qc ref2', attachment_type=AttachmentType.REFERENCE, content=global_range_test_document2, context=ReferenceAttachmentContext(parser_id=parser_id))
        att2_id = self.rrclient.create_attachment(dp_id, attachment2)
        self.addCleanup(self.rrclient.delete_attachment, att2_id)

        self.client.assign_data_product(instrument_device_id, dp_id)
        self.addCleanup(self.client.unassign_data_product, instrument_device_id, dp_id)
        svm = StoredValueManager(self.container)
        doc = svm.read_value('grt_CE01ISSM-MF005-01-CTDBPC999_TEMPWAT')
        np.testing.assert_array_almost_equal(doc['grt_min_value'], -2.)
class BulkIngestBase(object):
    """
    awkward, non-obvious test class!  subclasses will implement data-specific methods and
    this test class will parse sample file and assert data was read.

    test_data_ingest: create resources and call...
        start_agent: starts agent and then call...
            start_listener: starts listeners for data, including one that when granule is received calls...
                get_retrieve_client: asserts that callback had some data

    See replacement TestPreloadThenLoadDataset.  A little more declarative and straight-forward, but much slower (requires preload).
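
    A minimal subclass sketch (hypothetical names and values, for illustration only):

        class TestMyIngest(BulkIngestBase, IonIntegrationTestCase):
            EDA_NAME = 'my_agent'
            EDA_MOD  = 'ion.agents.data.external_dataset_agent'
            EDA_CLS  = 'ExternalDatasetAgent'

            def setup_resources(self):
                self.name = 'my_dataset'
                self.description = 'bulk ingest sketch'

            def build_param_contexts(self):
                return []  # ids of the ParameterContext resources for this dataset

            def create_external_dataset(self):
                dset = ExternalDataset(name=self.name, dataset_description=DatasetDescription(),
                                       update_description=UpdateDescription(), contact=ContactInformation())
                return self.data_acquisition_management.create_external_dataset(external_dataset=dset)

            def get_dvr_config(self):
                return {'dvr_mod': 'path.to.my_data_handler', 'dvr_cls': 'MyDataHandler', 'dh_cfg': {}}

            def get_retrieve_client(self, dataset_id=''):
                granule = self.data_retriever.retrieve(dataset_id)
                rdt = RecordDictionaryTool.load_from_granule(granule)
                assert rdt['time'] is not None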
    """
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.pubsub_management    = PubsubManagementServiceClient()
        self.dataset_management   = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.data_acquisition_management = DataAcquisitionManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.process_dispatch_client = ProcessDispatcherServiceClient(node=self.container.node)
        self.resource_registry       = self.container.resource_registry
        self.context_ids = self.build_param_contexts()
        self.setup_resources()

    def build_param_contexts(self):
        raise NotImplementedError('build_param_contexts must be implemented in child classes')

    def create_external_dataset(self):
        raise NotImplementedError('create_external_dataset must be implemented in child classes')

    def get_dvr_config(self):
        raise NotImplementedError('get_dvr_config must be implemented in child classes')

    def get_retrieve_client(self, dataset_id=''):
        raise NotImplementedError('get_retrieve_client must be implemented in child classes')

    def test_data_ingest(self):
        self.pdict_id = self.create_parameter_dict(self.name)
        self.stream_def_id = self.create_stream_def(self.name, self.pdict_id)
        self.data_product_id = self.create_data_product(self.name, self.description, self.stream_def_id)
        self.dataset_id = self.get_dataset_id(self.data_product_id)
        self.stream_id, self.route = self.get_stream_id_and_route(self.data_product_id)
        self.external_dataset_id = self.create_external_dataset()
        self.data_producer_id = self.register_external_dataset(self.external_dataset_id)
        self.start_agent()

    def create_parameter_dict(self, name=''):
        return self.dataset_management.create_parameter_dictionary(name=name, parameter_context_ids=self.context_ids, temporal_context='time')

    def create_stream_def(self, name='', pdict_id=''):
        return self.pubsub_management.create_stream_definition(name=name, parameter_dictionary_id=pdict_id)

    def create_data_product(self, name='', description='', stream_def_id=''):
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = DataProduct(
            name=name,
            description=description,
            processing_level_code='Parsed_Canonical',
            temporal_domain=tdom,
            spatial_domain=sdom)

        data_product_id = self.data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
        return data_product_id

    def register_external_dataset(self, external_dataset_id=''):
        return self.data_acquisition_management.register_external_data_set(external_dataset_id=external_dataset_id)

    def get_dataset_id(self, data_product_id=''):
        dataset_ids, assocs = self.resource_registry.find_objects(subject=data_product_id, predicate='hasDataset', id_only=True)
        return dataset_ids[0]

    def get_stream_id_and_route(self, data_product_id):
        stream_ids, _ = self.resource_registry.find_objects(data_product_id, PRED.hasStream, RT.Stream, id_only=True)
        stream_id = stream_ids[0]
        route = self.pubsub_management.read_stream_route(stream_id)
        #self.create_logger(self.name, stream_id)
        return stream_id, route

    def start_agent(self):
        agent_config = {
            'driver_config': self.get_dvr_config(),
            'stream_config': {},
            'agent': {'resource_id': self.external_dataset_id},
            'test_mode': True
        }

        self._ia_pid = self.container.spawn_process(
            name=self.EDA_NAME,
            module=self.EDA_MOD,
            cls=self.EDA_CLS,
            config=agent_config)

        self._ia_client = ResourceAgentClient(self.external_dataset_id, process=FakeProcess())

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command=DriverEvent.START_AUTOSAMPLE)
        self._ia_client.execute_resource(command=cmd)

        self.start_listener(self.dataset_id)

    def stop_agent(self):
        cmd = AgentCommand(command=DriverEvent.STOP_AUTOSAMPLE)
        self._ia_client.execute_resource(cmd)
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        self._ia_client.execute_agent(cmd)
        self.container.terminate_process(self._ia_pid)

    def start_listener(self, dataset_id=''):
        dataset_modified = Event()
        #callback to use retrieve to get data from the coverage
        def cb(*args, **kwargs):
            self.get_retrieve_client(dataset_id=dataset_id)

        #callback to keep execution going once dataset has been fully ingested
        def cb2(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id)
        es.start()

        es2 = EventSubscriber(event_type=OT.DeviceCommonLifecycleEvent, callback=cb2, origin='BaseDataHandler._acquire_sample')
        es2.start()

        self.addCleanup(es.stop)
        self.addCleanup(es2.stop)

        #let it go for up to 120 seconds, then stop the agent and reset it
        dataset_modified.wait(120)
        self.stop_agent()

    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name+'_logger')
        producer_definition.executable = {
            'module':'ion.processes.data.stream_granule_logger',
            'class':'StreamGranuleLogger'
        }

        logger_procdef_id = self.process_dispatch_client.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process':{
                'stream_id':stream_id,
                }
        }
        pid = self.process_dispatch_client.schedule_process(process_definition_id=logger_procdef_id, configuration=configuration)

        return pid
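    # typical (hypothetical) use of create_logger from a subclass, with cleanup:
    #     pid = self.create_logger(self.name, stream_id)
    #     self.addCleanup(self.process_dispatch_client.cancel_process, pid)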
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='DAP', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.name='Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'usgs_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        # The usgs.nc test dataset is a download of the R1 dataset found here:
        # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
        dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
        dset.dataset_description.parameters['temporal_dimension'] = 'time'
        dset.dataset_description.parameters['zonal_dimension'] = 'lon'
        dset.dataset_description.parameters['meridional_dimension'] = 'lat'
        dset.dataset_description.parameters['vertical_dimension'] = 'z'
        dset.dataset_description.parameters['variables'] = [
            'water_temperature',
            'streamflow',
            'water_temperature_bottom',
            'water_temperature_middle',
            'specific_conductance',
            'data_qualifier',
            ]
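        # the dataset_description.parameters above tell the data handler which file, dimensions
        # and variables to read from the external source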

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='dap_model')
        dsrc_model.model = 'DAP'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Associate everything using the individual assign methods
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        #create temp streamdef so the data product can create the stream
        streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dprod = IonObject(RT.DataProduct,
            name='usgs_parsed_product',
            description='parsed usgs product',
            temporal_domain = tdom,
            spatial_domain = sdom)

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
                                                    stream_definition_id=streamdef_id,
                                                    parameter_dictionary=parameter_dictionary)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset':ds_id, 'ExternalDataProvider':ext_dprov_id, 'DataSource':ext_dsrc_id, 'DataSourceModel':ext_dsrc_model_id, 'DataProducer':dproducer_id, 'DataProduct':dproduct_id, 'Stream':stream_id}))

        #CBM: Use CF standard_names

#        ttool = TaxyTool()
#        ttool.add_taxonomy_set('time','time')
#        ttool.add_taxonomy_set('lon','longitude')
#        ttool.add_taxonomy_set('lat','latitude')
#        ttool.add_taxonomy_set('z','water depth')
#        ttool.add_taxonomy_set('water_temperature', 'average water temperature')
#        ttool.add_taxonomy_set('water_temperature_bottom','water temperature at bottom of water column')
#        ttool.add_taxonomy_set('water_temperature_middle', 'water temperature at middle of water column')
#        ttool.add_taxonomy_set('streamflow', 'flow velocity of stream')
#        ttool.add_taxonomy_set('specific_conductance', 'specific conductance of water')
#        ttool.add_taxonomy_set('data_qualifier','data qualifier flag')
#
#        ttool.add_taxonomy_set('coords','This group contains coordinate parameters')
#        ttool.add_taxonomy_set('data','This group contains data parameters')

        # Create the logger for receiving publications
        self.create_stream_and_logger(name='usgs',stream_id=stream_id)

        pdict = ParameterDictionary()
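        # build the coverage ParameterDictionary by hand: one ParameterContext per parameter,
        # with axis (reference_frame) and unit (uom) metadata where applicable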

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('water_temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_bottom', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_middle', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('z', param_type=QuantityType(value_encoding = numpy.dtype('float32')))
        temp_ctxt.uom = 'meters'
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('streamflow', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('specific_conductance', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        pres_ctxt = ParameterContext('data_qualifier', param_type=QuantityType(value_encoding=numpy.dtype('bool')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
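        # handler config for the test run; 'TESTING' and 'max_records' presumably keep acquisitions test-sized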
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,
            #'taxonomy':ttool.dump(),
            'param_dictionary':pdict.dump(),
            'data_producer_id':dproducer_id,#CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records':4,
        }
class TestBulkIngest(IonIntegrationTestCase):

    EDA_MOD = 'ion.agents.data.external_dataset_agent'
    EDA_CLS = 'ExternalDatasetAgent'


    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.data_retriever    = DataRetrieverServiceClient(node=self.container.node)

        self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)

        # Data async and subscription  TODO: Replace with new subscriber
        self._finished_count = None
        #TODO: Switch to gevent.queue.Queue
        self._async_finished_result = AsyncResult()
        self._finished_events_received = []
        self._finished_event_subscriber = None
        self._start_finished_event_subscriber()
        self.addCleanup(self._stop_finished_event_subscriber)


        self.DVR_CONFIG = {
            'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler',
            'dvr_cls' : 'SlocumDataHandler',
            }

        self._setup_resources()

        self.agent_config = {
            'driver_config' : self.DVR_CONFIG,
            'stream_config' : {},
            'agent'         : {'resource_id': self.EDA_RESOURCE_ID},
            'test_mode' : True
        }
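        # the agent_config above has the same shape as BulkIngestBase.start_agent's config:
        # driver config plus the resource id of the dataset the agent will manage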

        datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance,  name='ExternalDatasetAgentInstance1', description='external data agent instance',
                                              handler_module=self.EDA_MOD, handler_class=self.EDA_CLS,
                                              dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=self.agent_config )
        self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(external_dataset_agent_instance=datasetagent_instance_obj,
                                                                                                 external_dataset_agent_id=self.datasetagent_id, external_dataset_id=self.EDA_RESOURCE_ID)


        #TG: Setup/configure the granule logger to log granules as they're published
        pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id)

        dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id)
        print 'TestBulkIngest: dataset agent instance obj = ', dataset_agent_instance_obj


        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient('datasetagentclient', name=pid,  process=FakeProcess())
        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))



    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name+'_logger')
        producer_definition.executable = {
            'module':'ion.processes.data.stream_granule_logger',
            'class':'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process':{
                'stream_id':stream_id,
                }
        }
        pid = self.processdispatchclient.schedule_process(process_definition_id= logger_procdef_id, configuration=configuration)

        return pid

    def _start_finished_event_subscriber(self):
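        # listen for DeviceEvents whose description is 'TestingFinished' (presumably published by
        # the data handler when a TESTING-mode acquisition completes) and trip the AsyncResult
        # once the expected number of events has arrived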

        def consume_event(*args,**kwargs):
            log.debug('EventSubscriber event received: %s', str(args[0]) )
            if args[0].description == 'TestingFinished':
                log.debug('TestingFinished event received')
                self._finished_events_received.append(args[0])
                if self._finished_count and self._finished_count == len(self._finished_events_received):
                    log.debug('Finishing test...')
                    self._async_finished_result.set(len(self._finished_events_received))
                    log.debug('Called self._async_finished_result.set({0})'.format(len(self._finished_events_received)))

        self._finished_event_subscriber = EventSubscriber(event_type='DeviceEvent', callback=consume_event)
        self._finished_event_subscriber.start()

    def _stop_finished_event_subscriber(self):
        if self._finished_event_subscriber:
            self._finished_event_subscriber.stop()
            self._finished_event_subscriber = None


    def tearDown(self):
        pass


    @unittest.skip('Update to agent refactor.')
    def test_slocum_data_ingest(self):

        HIST_CONSTRAINTS_1 = {}
        # Test instrument driver execute interface to start and stop streaming mode.
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)




        # Make sure the polling interval is appropriate for a test
        params = {
            'POLLING_INTERVAL': 3
        }
        self._ia_client.set_param(params)

        self._finished_count = 1

        cmd = AgentCommand(command='acquire_data')
        self._ia_client.execute(cmd)

        # Assert that data was received
        self._async_finished_result.get(timeout=15)

        self.assertTrue(len(self._finished_events_received) >= 1)

        cmd = AgentCommand(command='reset')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)


        #todo: enable after Luke's mods to retrieve; right now the time axis must be called 'time'
        #        replay_granule = self.data_retriever.retrieve_last_data_points(self.dataset_id, 10)

        #        rdt = RecordDictionaryTool.load_from_granule(replay_granule)
        #
        #        comp = rdt['date_pattern'] == numpy.arange(10) + 10
        #
        #        log.debug("TestBulkIngest: comp: %s", comp)
        #
        #        self.assertTrue(comp.all())

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)


    def _setup_resources(self):

        self.loggerpids = []

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSetModel
        dataset_model = ExternalDatasetModel(name='slocum_model')
        dataset_model.datset_type = 'SLOCUM'
        dataset_model_id = self.dams_client.create_external_dataset_model(dataset_model)

        # Create ExternalDataset
        ds_name = 'slocum_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())


        dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
        dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
        dset.dataset_description.parameters['date_pattern'] = '%Y %j'
        dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
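        # the two capture groups (4-digit year, 3-digit day-of-year) pair with date_pattern '%Y %j' above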
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
            'c_wpt_y_lmc',
            'sci_water_cond',
            'm_y_lmc',
            'u_hd_fin_ap_inflection_holdoff',
            'sci_m_present_time',
            'm_leakdetect_voltage_forward',
            'sci_bb3slo_b660_scaled',
            'c_science_send_all',
            'm_gps_status',
            'm_water_vx',
            'm_water_vy',
            'c_heading',
            'sci_fl3slo_chlor_units',
            'u_hd_fin_ap_gain',
            'm_vacuum',
            'u_min_water_depth',
            'm_gps_lat',
            'm_veh_temp',
            'f_fin_offset',
            'u_hd_fin_ap_hardover_holdoff',
            'c_alt_time',
            'm_present_time',
            'm_heading',
            'sci_bb3slo_b532_scaled',
            'sci_fl3slo_cdom_units',
            'm_fin',
            'x_cycle_overrun_in_ms',
            'sci_water_pressure',
            'u_hd_fin_ap_igain',
            'sci_fl3slo_phyco_units',
            'm_battpos',
            'sci_bb3slo_b470_scaled',
            'm_lat',
            'm_gps_lon',
            'sci_ctd41cp_timestamp',
            'm_pressure',
            'c_wpt_x_lmc',
            'c_ballast_pumped',
            'x_lmc_xy_source',
            'm_lon',
            'm_avg_speed',
            'sci_water_temp',
            'u_pitch_ap_gain',
            'm_roll',
            'm_tot_num_inflections',
            'm_x_lmc',
            'u_pitch_ap_deadband',
            'm_final_water_vy',
            'm_final_water_vx',
            'm_water_depth',
            'm_leakdetect_voltage',
            'u_pitch_max_delta_battpos',
            'm_coulomb_amphr',
            'm_pitch',
            ]
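        # The names above are native Slocum glider science/engineering variables
        # (m_*, sci_*, c_*, u_* prefixes) as recorded in the glider's sbd data files.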



        ## Create the external dataset
        ds_id = self.dams_client.create_external_dataset(external_dataset=dset, external_dataset_model_id=dataset_model_id)
        ext_dprov_id = self.dams_client.create_external_data_provider(external_data_provider=dprov)

        # Register the ExternalDataset
        dproducer_id = self.dams_client.register_external_data_set(external_dataset_id=ds_id)
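        # Registering the dataset creates a DataProducer; its id is threaded into the
        # data handler config (dh_cfg) below.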

        ## Create the dataset agent
        datasetagent_obj = IonObject(RT.ExternalDatasetAgent,  name='ExternalDatasetAgent1', description='external data agent', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS )
        self.datasetagent_id = self.dams_client.create_external_dataset_agent(external_dataset_agent=datasetagent_obj, external_dataset_model_id=dataset_model_id)

        # Generate the data product and associate it to the ExternalDataset
        pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
        streamdef_id = self.pubsub_client.create_stream_definition(name="temp", parameter_dictionary_id=pdict.identifier)

        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()


        dprod = IonObject(RT.DataProduct,
                          name='slocum_parsed_product',
                          description='parsed slocum product',
                          temporal_domain = tdom,
                          spatial_domain = sdom)

        self.dproduct_id = self.dataproductclient.create_data_product(data_product=dprod,
                                                                      stream_definition_id=streamdef_id)

        self.dams_client.assign_data_product(input_resource_id=ds_id, data_product_id=self.dproduct_id)

        #save the incoming slocum data
        self.dataproductclient.activate_data_product_persistence(self.dproduct_id)
        self.addCleanup(self.dataproductclient.suspend_data_product_persistence, self.dproduct_id)
        
        stream_ids, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_ids[0]

        dataset_id, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasDataset, object_type=RT.Dataset, id_only=True)
        self.dataset_id = dataset_id[0]

        pid = self.create_logger('slocum_parsed_product', stream_id )
        self.loggerpids.append(pid)

        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,
            'param_dictionary':pdict.dump(),
            'data_producer_id':dproducer_id, #CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records':20,
            }
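        # dh_cfg is handed to the data handler named by DVR_CONFIG['dvr_mod']/'dvr_cls'
        # (here the SlocumDataHandler); 'max_records' presumably caps how many records
        # go into each published granule.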

        # Create the logger for receiving publications
        #self.create_stream_and_logger(name='slocum',stream_id=stream_id)
        # Create agent config.
        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name

    # NOTE: this second _setup_resources (the file-based RUV variant) overrides the
    # Slocum variant defined above; Python keeps only the last definition in a class body.
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.name='Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'ruv_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        dset.dataset_description.parameters['dataset_path'] = 'test_data/RDLi_SEAB_2011_08_24_1600.ruv'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
        ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='ruv_model')
        dsrc_model.model = 'RUV'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or create each association individually using the assign methods
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)
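        # Associations now in place: data source -> provider, data source -> model,
        # dataset -> data source, and dataset -> agent instance.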

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='ruv_parsed_product', description='parsed ruv product')
        dproduct_id = dpms_cli.create_data_product(data_product=dprod)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id, create_stream=True)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset':ds_id, 'ExternalDataProvider':ext_dprov_id, 'DataSource':ext_dsrc_id, 'DataSourceModel':ext_dsrc_model_id, 'DataProducer':dproducer_id, 'DataProduct':dproduct_id, 'Stream':stream_id}))

        #CBM: Use CF standard_names

        ttool = TaxyTool()

        ttool.add_taxonomy_set('data','test data')
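        # The taxonomy roughly maps nicknames ('data') to handles used when building
        # granules; it is serialized with dump() into dh_cfg below.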

        #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        self.create_stream_and_logger(name='ruv',stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,
            'external_dataset_res':dset,
            'taxonomy':ttool.dump(),
            'data_producer_id':dproducer_id,#CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records':20,
        }


class TestExternalDatasetAgentMgmt(IonIntegrationTestCase):

    # DataHandler config
    DVR_CONFIG = {
        'dvr_mod': 'ion.agents.data.handlers.base_data_handler',
        'dvr_cls': 'DummyDataHandler',
    }

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        log.debug("TestExternalDatasetAgentMgmt: started services")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)

#    @unittest.skip('not yet working. fix activate_data_product_persistence()')
#@unittest.skip()

    @unittest.skip('not working')
    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datasetModel_obj = IonObject(RT.ExternalDatasetModel,
                                     name='ExampleDatasetModel',
                                     description="ExampleDatasetModel",
                                     datset_type="FibSeries")
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(
                datasetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" % ex)
        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s",
            str(datasetModel_id))

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(RT.ExternalDatasetAgent,
                                     name='datasetagent007',
                                     description="datasetagent007",
                                     handler_module=EDA_MOD,
                                     handler_class=EDA_CLS)
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(
                datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" % ex)
        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s",
            str(datasetAgent_id))

        # Create ExternalDataset
        log.debug(
            'TestExternalDatasetAgentMgmt: Create external dataset resource ')
        extDataset_obj = IonObject(RT.ExternalDataset,
                                   name='ExtDataset',
                                   description="ExtDataset")
        try:
            extDataset_id = self.damsclient.create_external_dataset(
                extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" %
                      ex)

        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ",
            str(extDataset_id))

        #register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(
            extDataset_id)

        # create a stream definition for the data from the ctd simulator

        pdict_id = self.datasetclient.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        log.debug(
            "TestExternalDatasetAgentMgmt: new Stream Definition id = %s",
            str(ctd_stream_def_id))

        log.debug(
            "TestExternalDatasetAgentMgmt: Creating new data product with a stream definition"
        )
        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')

        data_product_id1 = self.dpclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s",
                  str(data_product_id1))

        self.damsclient.assign_data_product(input_resource_id=extDataset_id,
                                            data_product_id=data_product_id1)

        #todo fix the problem here....
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s",
                  str(stream_ids))
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set('data', 'external_data')

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id':
            stream_id,  #TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
            'data_producer_id': dproducer_id,
            #            'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id
            'taxonomy': tx.dump(),  #TODO: Currently does not support sets
            'max_records': 4,
        }

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            'driver_config': self.DVR_CONFIG,
            'stream_config': self._stream_config,
            'agent': {
                'resource_id': EDA_RESOURCE_ID
            },
            'test_mode': True
        }
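        # Minimal agent config: driver (DataHandler) settings, an empty stream config,
        # the resource the agent fronts, and test_mode for running inside the test container.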

        extDatasetAgentInstance_obj = IonObject(
            RT.ExternalDatasetAgentInstance,
            name='DatasetAgentInstance',
            description="DatasetAgentInstance",
            dataset_driver_config=self.DVR_CONFIG,
            dataset_agent_config=agent_config)
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(
            external_dataset_agent_instance=extDatasetAgentInstance_obj,
            external_dataset_agent_id=datasetAgent_id,
            external_dataset_id=extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s",
            str(extDatasetAgentInstance_obj))
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s",
            str(extDatasetAgentInstance_id))

        #Check that the instance is currently not active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(
            extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ",
            str(id), str(active))

        self.damsclient.start_external_dataset_agent_instance(
            extDatasetAgentInstance_id)

        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(
            extDatasetAgentInstance_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s",
            str(dataset_agent_instance_obj))

        # now the instance process should be active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(
            extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ",
            str(id), str(active))

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id,
                                               process=FakeProcess())
        print 'TestExternalDatasetAgentMgmt: got ia client %s' % self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset client %s",
                  str(self._dsa_client))

        #        cmd=AgentCommand(command='initialize')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='go_active')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='run')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        log.info('Send an unconstrained request for data (\'new data\')')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        cmd = AgentCommand(command='reset')
        #        _ = self._dsa_client.execute_agent(cmd)
        #        cmd = AgentCommand(command='get_current_state')
        #        retval = self._dsa_client.execute_agent(cmd)
        #        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

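        # Walk the agent FSM: initialize -> INACTIVE, go_active -> IDLE,
        # run -> OBSERVATORY, pause -> STOPPED, resume -> OBSERVATORY,
        # clear -> IDLE, and finally reset -> UNINITIALIZED.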
        cmd = AgentCommand(command='initialize')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='resume')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='reset')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.damsclient.stop_external_dataset_agent_instance(
            extDatasetAgentInstance_id)

    def test_dataset_agent_prepare_support(self):

        eda_sup = self.damsclient.prepare_external_dataset_agent_support()

        eda_obj = IonObject(RT.ExternalDatasetAgent,
                            name="ExternalDatasetAgent")
        eda_id = self.damsclient.create_external_dataset_agent(eda_obj)

        eda_sup = self.damsclient.prepare_external_dataset_agent_support(
            external_dataset_agent_id=eda_id)

        edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support(
        )

        edai_obj = IonObject(RT.ExternalDatasetAgentInstance,
                             name="ExternalDatasetAgentInstance")
        edai_id = self.damsclient.create_external_dataset_agent_instance(
            edai_obj)

        edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support(
            external_dataset_agent_instance_id=edai_id)


class TestIntDataAcquisitionManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)

    def tearDown(self):
        pass


    #@unittest.skip('Not done yet.')
    def test_data_source_ops(self):
        # test creating a new data source
        print 'Creating new data source'
        datasource_obj = IonObject(RT.DataSource,
                           name='DataSource1',
                           description='instrument based new source' ,
                            type='sbe37')
        try:
            ds_id = self.client.create_data_source(datasource_obj)
        except BadRequest as ex:
            self.fail("failed to create new data source: %s" %ex)
        print 'new data source id = ', ds_id


        # test reading a non-existent data source
        print 'reading non-existent data source'
        try:
            dp_obj = self.client.read_data_source('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during read: %s" %dp_obj)

        # update a data source (tests read also)
        print 'Updating data source'
        # first get the existing data source object
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass

        # now tweak the object
        datasource_obj.description = 'the very first data source'
        # now write the dp back to the registry
        try:
            update_result = self.client.update_data_source(datasource_obj)
        except NotFound as ex:
            self.fail("existing data source was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data source update")
        #else:
        #    self.assertTrue(update_result == True)
        # now get the data source back to see if it was updated
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass
        self.assertTrue(datasource_obj.description == 'the very first data source')


        # now 'delete' the data source
        print "deleting data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during delete")
        #self.assertTrue(delete_result == True)
        # now try to get the deleted dp object
        try:
            dp_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data source was found during read")

        # now try to delete the already deleted data source object
        print "deleting non-existing data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during delete")


    #@unittest.skip('Not done yet.')
    def test_register_instrument(self):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

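        # register_instrument should create a DataProducer resource and associate it
        # with the instrument; the returned id identifies that producer.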
        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id


        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id, True)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id, True)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering a new data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        ext_dataset_obj = IonObject(RT.ExternalDataset, name='DataSet1',description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id, True)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id, True)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering a external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)



    def test_register_process(self):
        # Register a data process as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        process_obj = IonObject(RT.DataProcess, name='Proc1',description='a data process transform')
        process_id, rev = self.rrclient.create(process_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new process
        try:
            ds_id = self.client.register_process(process_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to a process (create_stream=False)
        try:
            self.client.assign_data_product(process_id, dataproduct_id, False)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)


        # test UNassigning a data product from the data process (no stream was created)
        try:
            self.client.unassign_data_product(process_id, dataproduct_id, False)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering a process
        try:
            ds_id = self.client.unregister_process(process_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    #@unittest.skip('not ready')
    def test_eoi_resources(self):

            #
            # test creating a new data provider
            #
            print 'Creating new external_data_provider'
            dataprovider_obj = IonObject(RT.ExternalDataProvider,
                               name='ExtDataProvider1',
                               description='external data provider ')
            try:
                dataprovider_id = self.client.create_external_data_provider(dataprovider_obj)
            except BadRequest as ex:
                self.fail("failed to create new data provider: %s" %ex)
            print 'new data provider id = ', dataprovider_id

            #
            # test creating a new data source
            #
            print 'Creating new data source'
            datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='data source ',
                               type='DAP')
            try:
                datasource_id = self.client.create_data_source(datasource_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source: %s" %ex)
            print 'new data source id = ', datasource_id

            #
            # test creating a new data source model
            #
            print 'Creating new data source model'
            datamodel_obj = IonObject(RT.DataSourceModel,
                               name='DataSourceModel1',
                               description='data source model',
                               model='model1')
            try:
                datamodel_id = self.client.create_data_source_model(datamodel_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source model: %s" %ex)
            print 'new data source model id = ', datamodel_id


            #
            # test creating a new external data set
            #
            print 'Creating new external data set'
            dataset_obj = IonObject(RT.ExternalDataset,
                               name='ExternalDataSet1',
                               description='external data set ')
            try:
                extdataset_id = self.client.create_external_dataset(dataset_obj)
            except BadRequest as ex:
                self.fail("failed to create new external data set: %s" %ex)
            print 'new external data set id = ', extdataset_id


            #
            # test creating a new external dataset agent
            #
            print 'Creating new external data agent '
            datasetagent_obj = IonObject(RT.ExternalDatasetAgent,
                               name='ExternalDatasetAgent1',
                               description='external data agent ')
            try:
                datasetagent_id = self.client.create_external_dataset_agent(datasetagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent: %s" %ex)
            print 'new external dataset agent id = ', datasetagent_id


            #
            # test creating a new data source agent
            #
            print 'Creating new data source agent'
            datasourceagent_obj = IonObject(RT.DataSourceAgent,
                               name='DataSourceAgent1',
                               description=' DataSource agent ')
            try:
                datasource_agent_id = self.client.create_data_source_agent(datasourceagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent: %s" %ex)
            print 'new data source agent id = ', datasource_agent_id

            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external dataset agent instance'
            datasetagentinstance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                               name='ExternalDatasetAgentInstance1',
                               description='external dataset agent instance ')
            try:
                datasetagentinstance_id = self.client.create_external_dataset_agent_instance(datasetagentinstance_obj, datasetagent_id)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent instance: %s" %ex)
            print 'new external data agent instance id = ', datasetagentinstance_id

            #
            # test creating a new datasource agent instance
            #
            print 'Creating new data source agent instance'
            datasourceagentinstance_obj = IonObject(RT.DataSourceAgentInstance,
                               name='ExternalDataSourceAgentInstance1',
                               description='external DataSource agent instance ')
            try:
                datasource_agent_instance_id = self.client.create_data_source_agent_instance(datasourceagentinstance_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent instance: %s" %ex)
            print 'new data source agent instance id = ', datasource_agent_instance_id

            #
            # test unassigning the associations
            #

            self.client.unassign_data_source_from_external_data_provider(datasource_id, dataprovider_id)

            self.client.unassign_data_source_from_data_model(datasource_id, datamodel_id)

            self.client.unassign_external_dataset_from_data_source(extdataset_id, datasource_id)

            #
            # test read
            #

            try:
                dp_obj = self.client.read_external_data_provider(dataprovider_id)
            except NotFound as ex:
                self.fail("existing data provicer was not found during read")
            else:
                pass

            try:
                dp_obj = self.client.read_data_source(datasource_id)
            except NotFound as ex:
                self.fail("existing data source was not found during read")
            else:
                pass

            #
            # test delete
            #
            try:
                self.client.delete_external_data_provider(dataprovider_id)
                self.client.delete_data_source(datasource_id)
                self.client.delete_external_dataset(extdataset_id)
            except NotFound as ex:
                self.fail("existing data product was not found during delete")


            # test reading a non-existent data provider
            print 'reading non-existent data provider'
            try:
                bad_obj = self.client.read_external_data_provider('some_fake_id')
            except NotFound as ex:
                pass
            else:
                self.fail("non-existing data product was found during read: %s" %bad_obj)

    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(name='example data agent',
                                   handler_module=self.DVR_CONFIG['dvr_mod'],
                                   handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance(
            name='example dataset agent instance')
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(
            eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(name='example data provider',
                                     institution=Institution(),
                                     contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(name='example datasource',
                          protocol_type='FILE',
                          institution=Institution(),
                          contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'ruv_test_dataset'
        dset = ExternalDataset(name=ds_name,
                               dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(),
                               contact=ContactInformation())

        dset.dataset_description.parameters['base_url'] = 'test_data/ruv/'
        dset.dataset_description.parameters[
            'list_pattern'] = 'RDLi_SEAB_2011_08_24_1600.ruv'
        dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H %M'
        dset.dataset_description.parameters[
            'date_extraction_pattern'] = r'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = []

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='ruv_model')
        #dsrc_model.model = 'RUV'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(
            external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(
            external_dataset_id=ds_id)

        # Or create each association individually using the assign methods
        dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(
            data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(
            external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(
            external_dataset_id=ds_id, agent_instance_id=eda_inst_id)

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext(
            'data',
            param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)
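        # A single int64 'data' context on the time axis is enough to build a
        # parameter dictionary and stream definition for this test.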

        #create temp streamdef so the data product can create the stream
        pc_list = []
        for pc_k, pc in pdict.iteritems():
            pc_list.append(dms_cli.create_parameter_context(
                pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('ruv_param_dict',
                                                       pc_list)

        streamdef_id = pubsub_cli.create_stream_definition(
            name="ruv",
            description="stream def for ruv testing",
            parameter_dictionary_id=pdict_id)

        dprod = IonObject(RT.DataProduct,
                          name='ruv_parsed_product',
                          description='parsed ruv product')

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(
            data_product=dprod, stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id,
                                     data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id,
                                              predicate=PRED.hasStream,
                                              object_type=RT.Stream,
                                              id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({
            'ExternalDataset': ds_id,
            'ExternalDataProvider': ext_dprov_id,
            'DataSource': ext_dsrc_id,
            'DataSourceModel': ext_dsrc_model_id,
            'DataProducer': dproducer_id,
            'DataProduct': dproduct_id,
            'Stream': stream_id
        }))

        #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='ruv',
                                                           stream_id=stream_id)
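        # The stream route is captured here and passed via dh_cfg below so the
        # handler can publish to the route directly.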

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'external_dataset_res': dset,
            'param_dictionary': pdict.dump(),
            'data_producer_id':
            dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 20,
        }


class TestBulkIngest(IonIntegrationTestCase):

    EDA_MOD = 'ion.agents.data.external_dataset_agent'
    EDA_CLS = 'ExternalDatasetAgent'


    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.data_retriever    = DataRetrieverServiceClient(node=self.container.node)

        self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)

        # Data async and subscription  TODO: Replace with new subscriber
        self._finished_count = None
        #TODO: Switch to gevent.queue.Queue
        self._async_finished_result = AsyncResult()
        self._finished_events_received = []
        self._finished_event_subscriber = None
        self._start_finished_event_subscriber()
        self.addCleanup(self._stop_finished_event_subscriber)


        self.DVR_CONFIG = {}
        self.DVR_CONFIG = {
            'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler',
            'dvr_cls' : 'SlocumDataHandler',
            }

        self._setup_resources()

        self.agent_config = {
            'driver_config' : self.DVR_CONFIG,
            'stream_config' : {},
            'agent'         : {'resource_id': self.EDA_RESOURCE_ID},
            'test_mode' : True
        }

        datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance,  name='ExternalDatasetAgentInstance1', description='external data agent instance',
                                              handler_module=self.EDA_MOD, handler_class=self.EDA_CLS,
                                              dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=self.agent_config )
        self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(external_dataset_agent_instance=datasetagent_instance_obj,
                                                                                                 external_dataset_agent_id=self.datasetagent_id, external_dataset_id=self.EDA_RESOURCE_ID)


        #TG: Setup/configure the granule logger to log granules as they're published
        pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id)

        dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id)
        print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj


        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient('datasetagentclient', name=pid,  process=FakeProcess())
        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))



    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name+'_logger')
        producer_definition.executable = {
            'module':'ion.processes.data.stream_granule_logger',
            'class':'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process':{
                'stream_id':stream_id,
                }
        }
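        # Schedule the logger process; StreamGranuleLogger subscribes to stream_id
        # and logs each granule it receives.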
        pid = self.processdispatchclient.schedule_process(process_definition_id= logger_procdef_id, configuration=configuration)

        return pid

    def _start_finished_event_subscriber(self):

        def consume_event(*args,**kwargs):
            log.debug('EventSubscriber event received: %s', str(args[0]) )
            if args[0].description == 'TestingFinished':
                log.debug('TestingFinished event received')
                self._finished_events_received.append(args[0])
                if self._finished_count and self._finished_count == len(self._finished_events_received):
                    log.debug('Finishing test...')
                    self._async_finished_result.set(len(self._finished_events_received))
                    log.debug('Called self._async_finished_result.set({0})'.format(len(self._finished_events_received)))

        self._finished_event_subscriber = EventSubscriber(event_type='DeviceEvent', callback=consume_event)
        self._finished_event_subscriber.start()

    def _stop_finished_event_subscriber(self):
        if self._finished_event_subscriber:
            self._finished_event_subscriber.stop()
            self._finished_event_subscriber = None


    def tearDown(self):
        pass


    @unittest.skip('Update to agent refactor.')
    def test_slocum_data_ingest(self):

        HIST_CONSTRAINTS_1 = {}
        # Test instrument driver execute interface to start and stop streaming mode.
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        # Make sure the polling interval is appropriate for a test
        params = {
            'POLLING_INTERVAL': 3
        }
        self._ia_client.set_param(params)

        self._finished_count = 1

        cmd = AgentCommand(command='acquire_data')
        self._ia_client.execute(cmd)

        # Assert that data was received
        self._async_finished_result.get(timeout=15)

        self.assertTrue(len(self._finished_events_received) >= 1)

        cmd = AgentCommand(command='reset')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)


        #todo enable after Luke's mods to retrieve; right now the time axis must be named 'time'
        #        replay_granule = self.data_retriever.retrieve_last_data_points(self.dataset_id, 10)

        #        rdt = RecordDictionaryTool.load_from_granule(replay_granule)
        #
        #        comp = rdt['date_pattern'] == numpy.arange(10) + 10
        #
        #        log.debug("TestBulkIngest: comp: %s", comp)
        #
        #        self.assertTrue(comp.all())

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)


    def _setup_resources(self):

        self.loggerpids = []

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSetModel
        dataset_model = ExternalDatasetModel(name='slocum_model')
        dataset_model.datset_type = 'SLOCUM'
        dataset_model_id = self.dams_client.create_external_dataset_model(dataset_model)

        # Create ExternalDataset
        ds_name = 'slocum_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())


        dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
        dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
        dset.dataset_description.parameters['date_pattern'] = '%Y %j'
        dset.dataset_description.parameters['date_extraction_pattern'] = r'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
            'c_wpt_y_lmc',
            'sci_water_cond',
            'm_y_lmc',
            'u_hd_fin_ap_inflection_holdoff',
            'sci_m_present_time',
            'm_leakdetect_voltage_forward',
            'sci_bb3slo_b660_scaled',
            'c_science_send_all',
            'm_gps_status',
            'm_water_vx',
            'm_water_vy',
            'c_heading',
            'sci_fl3slo_chlor_units',
            'u_hd_fin_ap_gain',
            'm_vacuum',
            'u_min_water_depth',
            'm_gps_lat',
            'm_veh_temp',
            'f_fin_offset',
            'u_hd_fin_ap_hardover_holdoff',
            'c_alt_time',
            'm_present_time',
            'm_heading',
            'sci_bb3slo_b532_scaled',
            'sci_fl3slo_cdom_units',
            'm_fin',
            'x_cycle_overrun_in_ms',
            'sci_water_pressure',
            'u_hd_fin_ap_igain',
            'sci_fl3slo_phyco_units',
            'm_battpos',
            'sci_bb3slo_b470_scaled',
            'm_lat',
            'm_gps_lon',
            'sci_ctd41cp_timestamp',
            'm_pressure',
            'c_wpt_x_lmc',
            'c_ballast_pumped',
            'x_lmc_xy_source',
            'm_lon',
            'm_avg_speed',
            'sci_water_temp',
            'u_pitch_ap_gain',
            'm_roll',
            'm_tot_num_inflections',
            'm_x_lmc',
            'u_pitch_ap_deadband',
            'm_final_water_vy',
            'm_final_water_vx',
            'm_water_depth',
            'm_leakdetect_voltage',
            'u_pitch_max_delta_battpos',
            'm_coulomb_amphr',
            'm_pitch',
            ]



        ## Create the external dataset
        ds_id = self.dams_client.create_external_dataset(external_dataset=dset, external_dataset_model_id=dataset_model_id)
        ext_dprov_id = self.dams_client.create_external_data_provider(external_data_provider=dprov)

        # Register the ExternalDataset
        dproducer_id = self.dams_client.register_external_data_set(external_dataset_id=ds_id)

        ## Create the dataset agent
        datasetagent_obj = IonObject(RT.ExternalDatasetAgent, name='ExternalDatasetAgent1', description='external data agent', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS)
        self.datasetagent_id = self.dams_client.create_external_dataset_agent(external_dataset_agent=datasetagent_obj, external_dataset_model_id=dataset_model_id)
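        # The agent resource records which data handler module/class
        # (self.EDA_MOD / self.EDA_CLS) the launched dataset agent should load.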

        # Generate the data product and associate it to the ExternalDataset
        pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
        streamdef_id = self.pubsub_client.create_stream_definition(name="temp", parameter_dictionary_id=pdict.identifier)

        # Build the default temporal/spatial domains; dump() serializes each
        # domain object to a dict so it can be stored on the DataProduct resource.
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()

        dprod = IonObject(RT.DataProduct,
                          name='slocum_parsed_product',
                          description='parsed slocum product',
                          temporal_domain=tdom,
                          spatial_domain=sdom)

        self.dproduct_id = self.dataproductclient.create_data_product(data_product=dprod,
                                                                      stream_definition_id=streamdef_id)

        self.dams_client.assign_data_product(input_resource_id=ds_id, data_product_id=self.dproduct_id)

        #save the incoming slocum data
        self.dataproductclient.activate_data_product_persistence(self.dproduct_id)

        stream_ids, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_ids[0]

        dataset_ids, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasDataset, object_type=RT.Dataset, id_only=True)
        self.dataset_id = dataset_ids[0]

        pid = self.create_logger('slocum_parsed_product', stream_id)
        self.loggerpids.append(pid)

        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'param_dictionary': pdict.dump(),
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 20,
            }
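        # dh_cfg is presumably passed through to the data handler: publish on
        # 'stream_id', stamp granules with 'data_producer_id', and cap each
        # batch at 'max_records' points while TESTING is set.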

        # The logger for receiving publications was already created above via
        # create_logger; the stream-and-logger variant is kept for reference:
        #self.create_stream_and_logger(name='slocum', stream_id=stream_id)

        # Create agent config.
        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name


    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(
            eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(),
                                     contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='DAP',
                          institution=Institution(),
                          contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.name = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'usgs_test_dataset'
        dset = ExternalDataset(name=ds_name,
                               dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(),
                               contact=ContactInformation())

        # The usgs.nc test dataset is a download of the R1 dataset found here:
        # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
        dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
        dset.dataset_description.parameters['temporal_dimension'] = 'time'
        dset.dataset_description.parameters['zonal_dimension'] = 'lon'
        dset.dataset_description.parameters['meridional_dimension'] = 'lat'
        dset.dataset_description.parameters['vertical_dimension'] = 'z'
        dset.dataset_description.parameters['variables'] = [
            'water_temperature',
            'streamflow',
            'water_temperature_bottom',
            'water_temperature_middle',
            'specific_conductance',
            'data_qualifier',
        ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='dap_model')
        dsrc_model.model = 'DAP'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'
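
        # data_handler_module/class are set to 'N/A', presumably because the
        # generic DAP model requires no custom handler in this test.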

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(
            external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(
            external_dataset_id=ds_id)

        # Create the associations individually, one assignment call at a time
        dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(
            data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(
            external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(
            external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)
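        # Each assign_* call above records an association (subject/predicate/object
        # triple) in the resource registry linking the dataset to its provider,
        # model, source, and agent instance.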

        # Generate the data product and associate it to the ExternalDataset
        dprod = DataProduct(name='usgs_parsed_product',
                            description='parsed usgs product')
        dproduct_id = dpms_cli.create_data_product(data_product=dprod)

        dams_cli.assign_data_product(input_resource_id=ds_id,
                                     data_product_id=dproduct_id,
                                     create_stream=True)

        # find_objects returns parallel lists of object ids and associations;
        # only the first (and only) stream id is needed here.
        stream_ids, assn = rr_cli.find_objects(subject=dproduct_id,
                                               predicate=PRED.hasStream,
                                               object_type=RT.Stream,
                                               id_only=True)
        stream_id = stream_ids[0]

        log.info('Created resources: {0}'.format({
            'ExternalDataset': ds_id,
            'ExternalDataProvider': ext_dprov_id,
            'DataSource': ext_dsrc_id,
            'DataSourceModel': ext_dsrc_model_id,
            'DataProducer': dproducer_id,
            'DataProduct': dproduct_id,
            'Stream': stream_id
        }))

        #CBM: Use CF standard_names

        ttool = TaxyTool()
        ttool.add_taxonomy_set('time', 'time')
        ttool.add_taxonomy_set('lon', 'longitude')
        ttool.add_taxonomy_set('lat', 'latitude')
        ttool.add_taxonomy_set('z', 'water depth')
        ttool.add_taxonomy_set('water_temperature',
                               'average water temperature')
        ttool.add_taxonomy_set('water_temperature_bottom',
                               'water temperature at bottom of water column')
        ttool.add_taxonomy_set('water_temperature_middle',
                               'water temperature at middle of water column')
        ttool.add_taxonomy_set('streamflow', 'flow velocity of stream')
        ttool.add_taxonomy_set('specific_conductance',
                               'specific conductance of water')
        ttool.add_taxonomy_set('data_qualifier', 'data qualifier flag')

        ttool.add_taxonomy_set('coords',
                               'This group contains coordinate parameters')
        ttool.add_taxonomy_set('data', 'This group contains data parameters')
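
        # Each taxonomy entry maps a field handle (matching the dataset variable
        # names above) to a human-readable alias; the taxonomy is serialized via
        # ttool.dump() into the data handler config below.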

        # Create the logger for receiving publications
        self.create_stream_and_logger(name='usgs', stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'taxonomy': ttool.dump(),
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 4,
        }