コード例 #1
0
class TestBulkIngest(IonIntegrationTestCase):

    EDA_MOD = 'ion.agents.data.external_dataset_agent'
    EDA_CLS = 'ExternalDatasetAgent'


    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.data_retriever    = DataRetrieverServiceClient(node=self.container.node)

        self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)

        # Data async and subscription  TODO: Replace with new subscriber
        self._finished_count = None
        #TODO: Switch to gevent.queue.Queue
        self._async_finished_result = AsyncResult()
        self._finished_events_received = []
        self._finished_event_subscriber = None
        self._start_finished_event_subscriber()
        self.addCleanup(self._stop_finished_event_subscriber)


        self.DVR_CONFIG = {}
        self.DVR_CONFIG = {
            'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler',
            'dvr_cls' : 'SlocumDataHandler',
            }

        self._setup_resources()

        self.agent_config = {
            'driver_config' : self.DVR_CONFIG,
            'stream_config' : {},
            'agent'         : {'resource_id': self.EDA_RESOURCE_ID},
            'test_mode' : True
        }

        datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance,  name='ExternalDatasetAgentInstance1', description='external data agent instance',
                                              handler_module=self.EDA_MOD, handler_class=self.EDA_CLS,
                                              dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=self.agent_config )
        self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(external_dataset_agent_instance=datasetagent_instance_obj,
                                                                                                 external_dataset_agent_id=self.datasetagent_id, external_dataset_id=self.EDA_RESOURCE_ID)


        #TG: Setup/configure the granule logger to log granules as they're published
        pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id)

        dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id)
        print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj


        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient('datasetagentclient', name=pid,  process=FakeProcess())
        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))



    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name+'_logger')
        producer_definition.executable = {
            'module':'ion.processes.data.stream_granule_logger',
            'class':'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process':{
                'stream_id':stream_id,
                }
        }
        pid = self.processdispatchclient.schedule_process(process_definition_id= logger_procdef_id, configuration=configuration)

        return pid

    def _start_finished_event_subscriber(self):

        def consume_event(*args,**kwargs):
            log.debug('EventSubscriber event received: %s', str(args[0]) )
            if args[0].description == 'TestingFinished':
                log.debug('TestingFinished event received')
                self._finished_events_received.append(args[0])
                if self._finished_count and self._finished_count == len(self._finished_events_received):
                    log.debug('Finishing test...')
                    self._async_finished_result.set(len(self._finished_events_received))
                    log.debug('Called self._async_finished_result.set({0})'.format(len(self._finished_events_received)))

        self._finished_event_subscriber = EventSubscriber(event_type='DeviceEvent', callback=consume_event)
        self._finished_event_subscriber.start()

    def _stop_finished_event_subscriber(self):
        if self._finished_event_subscriber:
            self._finished_event_subscriber.stop()
            self._finished_event_subscriber = None


    def tearDown(self):
        pass


    @unittest.skip('Update to agent refactor.')
    def test_slocum_data_ingest(self):

        HIST_CONSTRAINTS_1 = {}
        # Test instrument driver execute interface to start and stop streaming mode.
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)




        # Make sure the polling interval is appropriate for a test
        params = {
            'POLLING_INTERVAL': 3
        }
        self._ia_client.set_param(params)

        self._finished_count = 1

        cmd = AgentCommand(command='acquire_data')
        self._ia_client.execute(cmd)

        # Assert that data was received
        self._async_finished_result.get(timeout=15)

        self.assertTrue(len(self._finished_events_received) >= 1)

        cmd = AgentCommand(command='reset')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)


        #todo enable after Luke's mor to retrieve, right now must have the Time axis called 'time'
        #        replay_granule = self.data_retriever.retrieve_last_data_points(self.dataset_id, 10)

        #        rdt = RecordDictionaryTool.load_from_granule(replay_granule)
        #
        #        comp = rdt['date_pattern'] == numpy.arange(10) + 10
        #
        #        log.debug("TestBulkIngest: comp: %s", comp)
        #
        #        self.assertTrue(comp.all())

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)


    def _setup_resources(self):

        self.loggerpids = []

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSetModel
        dataset_model = ExternalDatasetModel(name='slocum_model')
        dataset_model.datset_type = 'SLOCUM'
        dataset_model_id = self.dams_client.create_external_dataset_model(dataset_model)

        # Create ExternalDataset
        ds_name = 'slocum_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())


        dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
        dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
        dset.dataset_description.parameters['date_pattern'] = '%Y %j'
        dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
            'c_wpt_y_lmc',
            'sci_water_cond',
            'm_y_lmc',
            'u_hd_fin_ap_inflection_holdoff',
            'sci_m_present_time',
            'm_leakdetect_voltage_forward',
            'sci_bb3slo_b660_scaled',
            'c_science_send_all',
            'm_gps_status',
            'm_water_vx',
            'm_water_vy',
            'c_heading',
            'sci_fl3slo_chlor_units',
            'u_hd_fin_ap_gain',
            'm_vacuum',
            'u_min_water_depth',
            'm_gps_lat',
            'm_veh_temp',
            'f_fin_offset',
            'u_hd_fin_ap_hardover_holdoff',
            'c_alt_time',
            'm_present_time',
            'm_heading',
            'sci_bb3slo_b532_scaled',
            'sci_fl3slo_cdom_units',
            'm_fin',
            'x_cycle_overrun_in_ms',
            'sci_water_pressure',
            'u_hd_fin_ap_igain',
            'sci_fl3slo_phyco_units',
            'm_battpos',
            'sci_bb3slo_b470_scaled',
            'm_lat',
            'm_gps_lon',
            'sci_ctd41cp_timestamp',
            'm_pressure',
            'c_wpt_x_lmc',
            'c_ballast_pumped',
            'x_lmc_xy_source',
            'm_lon',
            'm_avg_speed',
            'sci_water_temp',
            'u_pitch_ap_gain',
            'm_roll',
            'm_tot_num_inflections',
            'm_x_lmc',
            'u_pitch_ap_deadband',
            'm_final_water_vy',
            'm_final_water_vx',
            'm_water_depth',
            'm_leakdetect_voltage',
            'u_pitch_max_delta_battpos',
            'm_coulomb_amphr',
            'm_pitch',
            ]



        ## Create the external dataset
        ds_id = self.dams_client.create_external_dataset(external_dataset=dset, external_dataset_model_id=dataset_model_id)
        ext_dprov_id = self.dams_client.create_external_data_provider(external_data_provider=dprov)

        # Register the ExternalDataset
        dproducer_id = self.dams_client.register_external_data_set(external_dataset_id=ds_id)

        ## Create the dataset agent
        datasetagent_obj = IonObject(RT.ExternalDatasetAgent,  name='ExternalDatasetAgent1', description='external data agent', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS )
        self.datasetagent_id = self.dams_client.create_external_dataset_agent(external_dataset_agent=datasetagent_obj, external_dataset_model_id=dataset_model_id)

        # Generate the data product and associate it to the ExternalDataset
        pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
        streamdef_id = self.pubsub_client.create_stream_definition(name="temp", parameter_dictionary_id=pdict.identifier)

        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()


        dprod = IonObject(RT.DataProduct,
                          name='slocum_parsed_product',
                          description='parsed slocum product',
                          temporal_domain = tdom,
                          spatial_domain = sdom)

        self.dproduct_id = self.dataproductclient.create_data_product(data_product=dprod,
                                                                      stream_definition_id=streamdef_id)

        self.dams_client.assign_data_product(input_resource_id=ds_id, data_product_id=self.dproduct_id)

        #save the incoming slocum data
        self.dataproductclient.activate_data_product_persistence(self.dproduct_id)

        stream_ids, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_ids[0]

        dataset_id, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasDataset, object_type=RT.Dataset, id_only=True)
        self.dataset_id = dataset_id[0]

        pid = self.create_logger('slocum_parsed_product', stream_id )
        self.loggerpids.append(pid)

        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,
            'param_dictionary':pdict.dump(),
            'data_producer_id':dproducer_id, #CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records':20,
            }

        # Create the logger for receiving publications
        #self.create_stream_and_logger(name='slocum',stream_id=stream_id)
        # Create agent config.
        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
コード例 #2
0
class TestExternalDatasetAgentMgmt(IonIntegrationTestCase):

    # DataHandler config
    DVR_CONFIG = {"dvr_mod": "ion.agents.data.handlers.base_data_handler", "dvr_cls": "DummyDataHandler"}

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2deploy.yml")

        log.debug("TestExternalDatasetAgentMgmt: started services")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)

    #    @unittest.skip('not yet working. fix activate_data_product_persistence()')
    # @unittest.skip()
    @unittest.skip("not working")
    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datsetModel_obj = IonObject(
            RT.ExternalDatasetModel,
            name="ExampleDatasetModel",
            description="ExampleDatasetModel",
            datset_type="FibSeries",
        )
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(datsetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" % ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id))

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(
            RT.ExternalDatasetAgent,
            name="datasetagent007",
            description="datasetagent007",
            handler_module=EDA_MOD,
            handler_class=EDA_CLS,
        )
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" % ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id))

        # Create ExternalDataset
        log.debug("TestExternalDatasetAgentMgmt: Create external dataset resource ")
        extDataset_obj = IonObject(RT.ExternalDataset, name="ExtDataset", description="ExtDataset")
        try:
            extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" % ex)

        log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ", str(extDataset_id))

        # register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(extDataset_id)

        # create a stream definition for the data from the ctd simulator

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name="SBE37_CDM", parameter_dictionary_id=pdict_id)

        log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id))

        log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition")
        dp_obj = IonObject(RT.DataProduct, name="eoi dataset data", description=" stream test")

        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(
            RT.DataProduct, name="DP1", description="some new dp", temporal_domain=tdom, spatial_domain=sdom
        )

        data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)

        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1))

        self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1)

        # todo fix the problem here....
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids))
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set("data", "external_data")

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG["dh_cfg"] = {
            "TESTING": True,
            "stream_id": stream_id,  # TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
            "data_producer_id": dproducer_id,
            #            'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id
            "taxonomy": tx.dump(),  # TODO: Currently does not support sets
            "max_records": 4,
        }

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            "driver_config": self.DVR_CONFIG,
            "stream_config": self._stream_config,
            "agent": {"resource_id": EDA_RESOURCE_ID},
            "test_mode": True,
        }

        extDatasetAgentInstance_obj = IonObject(
            RT.ExternalDatasetAgentInstance,
            name="DatasetAgentInstance",
            description="DatasetAgentInstance",
            dataset_driver_config=self.DVR_CONFIG,
            dataset_agent_config=agent_config,
        )
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(
            external_dataset_agent_instance=extDatasetAgentInstance_obj,
            external_dataset_agent_id=datasetAgent_id,
            external_dataset_id=extDataset_id,
        )
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj))
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id))

        # Check that the instance is currently not active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ", str(id), str(active)
        )

        self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id)

        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj))

        # now the instance process should be active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ", str(id), str(active)
        )

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id, process=FakeProcess())
        print "TestExternalDatasetAgentMgmt: got ia client %s", self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset client %s", str(self._dsa_client))

        #        cmd=AgentCommand(command='initialize')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='go_active')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='run')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        log.info('Send an unconstrained request for data (\'new data\')')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        cmd = AgentCommand(command='reset')
        #        _ = self._dsa_client.execute_agent(cmd)
        #        cmd = AgentCommand(command='get_current_state')
        #        retval = self._dsa_client.execute_agent(cmd)
        #        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command="initialize")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command="go_active")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="pause")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command="resume")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="clear")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="pause")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command="clear")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="reset")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        # -------------------------------
        # Deactivate InstrumentAgentInstance
        # -------------------------------
        self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)

    def test_dataset_agent_prepare_support(self):

        eda_sup = self.damsclient.prepare_external_dataset_agent_support()

        eda_obj = IonObject(RT.ExternalDatasetAgent, name="ExternalDatasetAgent")
        eda_id = self.damsclient.create_external_dataset_agent(eda_obj)

        eda_sup = self.damsclient.prepare_external_dataset_agent_support(external_dataset_agent_id=eda_id)

        edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support()

        edai_obj = IonObject(RT.ExternalDatasetAgentInstance, name="ExternalDatasetAgentInstance")
        edai_id = self.damsclient.create_external_dataset_agent_instance(edai_obj)

        edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support(
            external_dataset_agent_instance_id=edai_id
        )
コード例 #3
0
class TestPreloadThenLoadDataset(IonIntegrationTestCase):
    """ Uses the preload system to define the ExternalDataset and related resources,
        then invokes services to perform the load
    """
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        config = dict(op="load",
                      scenario="NOSE",
                      attachments="res/preload/r2_ioc/attachments")
        self.container.spawn_process("Loader",
                                     "ion.processes.bootstrap.ion_loader",
                                     "IONLoader",
                                     config=config)
        self.pubsub = PubsubManagementServiceClient()
        self.dams = DataAcquisitionManagementServiceClient()

    @unittest.skip("depricated test, now in mi repo")
    def test_use_case(self):
        # setUp() has already started the container and performed the preload
        #        self.assert_dataset_loaded('Test External CTD Dataset') # make sure we have the ExternalDataset resources
        self.assert_dataset_loaded(
            'Unit Test SMB37')  # association changed -- now use device name
        self.do_listen_for_incoming(
        )  # listen for any data being received from the dataset
        self.do_read_dataset()  # call services to load dataset
        self.assert_data_received()  # check that data was received as expected
        self.do_shutdown()

    def assert_dataset_loaded(self, name):
        rr = self.container.resource_registry
        #        self.external_dataset = self.find_object_by_name(name, RT.ExternalDataset)
        devs, _ = rr.find_resources(RT.InstrumentDevice,
                                    name=name,
                                    id_only=False)
        self.assertEquals(len(devs), 1)
        self.device = devs[0]
        obj, _ = rr.find_objects(subject=self.device._id,
                                 predicate=PRED.hasAgentInstance,
                                 object_type=RT.ExternalDatasetAgentInstance)
        self.agent_instance = obj[0]
        obj, _ = rr.find_objects(object_type=RT.ExternalDatasetAgent,
                                 predicate=PRED.hasAgentDefinition,
                                 subject=self.agent_instance._id)
        self.agent = obj[0]

        driver_cfg = self.agent_instance.driver_config
        #stream_definition_id = driver_cfg['dh_cfg']['stream_def'] if 'dh_cfg' in driver_cfg else driver_cfg['stream_def']
        #self.stream_definition = rr.read(stream_definition_id)

        self.data_product = rr.read_object(subject=self.device._id,
                                           predicate=PRED.hasOutputProduct,
                                           object_type=RT.DataProduct)

        self.dataset_id = rr.read_object(subject=self.data_product._id,
                                         predicate=PRED.hasDataset,
                                         object_type=RT.Dataset,
                                         id_only=True)

        ids, _ = rr.find_objects(subject=self.data_product._id,
                                 predicate=PRED.hasStream,
                                 object_type=RT.Stream,
                                 id_only=True)
        self.stream_id = ids[0]
        self.route = self.pubsub.read_stream_route(self.stream_id)

    def do_listen_for_incoming(self):
        subscription_id = self.pubsub.create_subscription(
            'validator', data_product_ids=[self.data_product._id])
        self.addCleanup(self.pubsub.delete_subscription, subscription_id)

        self.granule_capture = []
        self.granule_count = 0

        def on_granule(msg, route, stream_id):
            self.granule_count += 1
            if self.granule_count < 5:
                self.granule_capture.append(msg)

        validator = StandaloneStreamSubscriber('validator',
                                               callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub.deactivate_subscription, subscription_id)

        self.dataset_modified = Event()

        def cb2(*args, **kwargs):
            self.dataset_modified.set()
            # TODO: event isn't using the ExternalDataset, but a different ID for a Dataset

        es = EventSubscriber(event_type=OT.DatasetModified,
                             callback=cb2,
                             origin=self.dataset_id)
        es.start()
        self.addCleanup(es.stop)

    def do_read_dataset(self):
        self.dams.start_external_dataset_agent_instance(
            self.agent_instance._id)
        #
        # should i wait for process (above) to start
        # before launching client (below)?
        #
        self.client = None
        end = time.time() + MAX_AGENT_START_TIME
        while not self.client and time.time() < end:
            try:
                self.client = ResourceAgentClient(self.device._id,
                                                  process=FakeProcess())
            except NotFound:
                time.sleep(2)
        if not self.client:
            self.fail(
                msg='external dataset agent process did not start in %d seconds'
                % MAX_AGENT_START_TIME)
        self.client.execute_agent(
            AgentCommand(command=ResourceAgentEvent.INITIALIZE))
        self.client.execute_agent(
            AgentCommand(command=ResourceAgentEvent.GO_ACTIVE))
        self.client.execute_agent(AgentCommand(command=ResourceAgentEvent.RUN))
        self.client.execute_resource(command=AgentCommand(
            command=DriverEvent.START_AUTOSAMPLE))

    def assert_data_received(self):

        #let it go for up to 120 seconds, then stop the agent and reset it
        if not self.dataset_modified.is_set():
            self.dataset_modified.wait(30)
        self.assertTrue(self.granule_count > 2,
                        msg='granule count = %d' % self.granule_count)

        rdt = RecordDictionaryTool.load_from_granule(self.granule_capture[0])
        self.assertAlmostEqual(0, rdt['oxygen'][0], delta=0.01)
        self.assertAlmostEqual(309.77, rdt['pressure'][0], delta=0.01)
        self.assertAlmostEqual(37.9848, rdt['conductivity'][0], delta=0.01)
        self.assertAlmostEqual(9.5163, rdt['temp'][0], delta=0.01)
        self.assertAlmostEqual(3527207897.0, rdt['time'][0], delta=1)

    def do_shutdown(self):
        self.dams.stop_external_dataset_agent_instance(self.agent_instance._id)
コード例 #4
0
class TestBulkIngest(IonIntegrationTestCase):

    EDA_MOD = 'ion.agents.data.external_dataset_agent'
    EDA_CLS = 'ExternalDatasetAgent'


    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dams_client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.data_retriever    = DataRetrieverServiceClient(node=self.container.node)

        self._container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)

        # Data async and subscription  TODO: Replace with new subscriber
        self._finished_count = None
        #TODO: Switch to gevent.queue.Queue
        self._async_finished_result = AsyncResult()
        self._finished_events_received = []
        self._finished_event_subscriber = None
        self._start_finished_event_subscriber()
        self.addCleanup(self._stop_finished_event_subscriber)


        self.DVR_CONFIG = {}
        self.DVR_CONFIG = {
            'dvr_mod' : 'ion.agents.data.handlers.slocum_data_handler',
            'dvr_cls' : 'SlocumDataHandler',
            }

        self._setup_resources()

        self.agent_config = {
            'driver_config' : self.DVR_CONFIG,
            'stream_config' : {},
            'agent'         : {'resource_id': self.EDA_RESOURCE_ID},
            'test_mode' : True
        }

        datasetagent_instance_obj = IonObject(RT.ExternalDatasetAgentInstance,  name='ExternalDatasetAgentInstance1', description='external data agent instance',
                                              handler_module=self.EDA_MOD, handler_class=self.EDA_CLS,
                                              dataset_driver_config=self.DVR_CONFIG, dataset_agent_config=self.agent_config )
        self.dataset_agent_instance_id = self.dams_client.create_external_dataset_agent_instance(external_dataset_agent_instance=datasetagent_instance_obj,
                                                                                                 external_dataset_agent_id=self.datasetagent_id, external_dataset_id=self.EDA_RESOURCE_ID)


        #TG: Setup/configure the granule logger to log granules as they're published
        pid = self.dams_client.start_external_dataset_agent_instance(self.dataset_agent_instance_id)

        dataset_agent_instance_obj= self.dams_client.read_external_dataset_agent_instance(self.dataset_agent_instance_id)
        print 'TestBulkIngest: Dataset agent instance obj: = ', dataset_agent_instance_obj


        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient('datasetagentclient', name=pid,  process=FakeProcess())
        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))



    def create_logger(self, name, stream_id=''):

        # logger process
        producer_definition = ProcessDefinition(name=name+'_logger')
        producer_definition.executable = {
            'module':'ion.processes.data.stream_granule_logger',
            'class':'StreamGranuleLogger'
        }

        logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
        configuration = {
            'process':{
                'stream_id':stream_id,
                }
        }
        pid = self.processdispatchclient.schedule_process(process_definition_id= logger_procdef_id, configuration=configuration)

        return pid

    def _start_finished_event_subscriber(self):

        def consume_event(*args,**kwargs):
            log.debug('EventSubscriber event received: %s', str(args[0]) )
            if args[0].description == 'TestingFinished':
                log.debug('TestingFinished event received')
                self._finished_events_received.append(args[0])
                if self._finished_count and self._finished_count == len(self._finished_events_received):
                    log.debug('Finishing test...')
                    self._async_finished_result.set(len(self._finished_events_received))
                    log.debug('Called self._async_finished_result.set({0})'.format(len(self._finished_events_received)))

        self._finished_event_subscriber = EventSubscriber(event_type='DeviceEvent', callback=consume_event)
        self._finished_event_subscriber.start()

    def _stop_finished_event_subscriber(self):
        if self._finished_event_subscriber:
            self._finished_event_subscriber.stop()
            self._finished_event_subscriber = None


    def tearDown(self):
        pass


    @unittest.skip('Update to agent refactor.')
    def test_slocum_data_ingest(self):

        HIST_CONSTRAINTS_1 = {}
        # Test instrument driver execute interface to start and stop streaming mode.
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)




        # Make sure the polling interval is appropriate for a test
        params = {
            'POLLING_INTERVAL': 3
        }
        self._ia_client.set_param(params)

        self._finished_count = 1

        cmd = AgentCommand(command='acquire_data')
        self._ia_client.execute(cmd)

        # Assert that data was received
        self._async_finished_result.get(timeout=15)

        self.assertTrue(len(self._finished_events_received) >= 1)

        cmd = AgentCommand(command='reset')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)


        #todo enable after Luke's mor to retrieve, right now must have the Time axis called 'time'
        #        replay_granule = self.data_retriever.retrieve_last_data_points(self.dataset_id, 10)

        #        rdt = RecordDictionaryTool.load_from_granule(replay_granule)
        #
        #        comp = rdt['date_pattern'] == numpy.arange(10) + 10
        #
        #        log.debug("TestBulkIngest: comp: %s", comp)
        #
        #        self.assertTrue(comp.all())

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)


    def _setup_resources(self):

        self.loggerpids = []

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSetModel
        dataset_model = ExternalDatasetModel(name='slocum_model')
        dataset_model.datset_type = 'SLOCUM'
        dataset_model_id = self.dams_client.create_external_dataset_model(dataset_model)

        # Create ExternalDataset
        ds_name = 'slocum_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())


        dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
        dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
        dset.dataset_description.parameters['date_pattern'] = '%Y %j'
        dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
            'c_wpt_y_lmc',
            'sci_water_cond',
            'm_y_lmc',
            'u_hd_fin_ap_inflection_holdoff',
            'sci_m_present_time',
            'm_leakdetect_voltage_forward',
            'sci_bb3slo_b660_scaled',
            'c_science_send_all',
            'm_gps_status',
            'm_water_vx',
            'm_water_vy',
            'c_heading',
            'sci_fl3slo_chlor_units',
            'u_hd_fin_ap_gain',
            'm_vacuum',
            'u_min_water_depth',
            'm_gps_lat',
            'm_veh_temp',
            'f_fin_offset',
            'u_hd_fin_ap_hardover_holdoff',
            'c_alt_time',
            'm_present_time',
            'm_heading',
            'sci_bb3slo_b532_scaled',
            'sci_fl3slo_cdom_units',
            'm_fin',
            'x_cycle_overrun_in_ms',
            'sci_water_pressure',
            'u_hd_fin_ap_igain',
            'sci_fl3slo_phyco_units',
            'm_battpos',
            'sci_bb3slo_b470_scaled',
            'm_lat',
            'm_gps_lon',
            'sci_ctd41cp_timestamp',
            'm_pressure',
            'c_wpt_x_lmc',
            'c_ballast_pumped',
            'x_lmc_xy_source',
            'm_lon',
            'm_avg_speed',
            'sci_water_temp',
            'u_pitch_ap_gain',
            'm_roll',
            'm_tot_num_inflections',
            'm_x_lmc',
            'u_pitch_ap_deadband',
            'm_final_water_vy',
            'm_final_water_vx',
            'm_water_depth',
            'm_leakdetect_voltage',
            'u_pitch_max_delta_battpos',
            'm_coulomb_amphr',
            'm_pitch',
            ]



        ## Create the external dataset
        ds_id = self.dams_client.create_external_dataset(external_dataset=dset, external_dataset_model_id=dataset_model_id)
        ext_dprov_id = self.dams_client.create_external_data_provider(external_data_provider=dprov)

        # Register the ExternalDataset
        dproducer_id = self.dams_client.register_external_data_set(external_dataset_id=ds_id)

        ## Create the dataset agent
        datasetagent_obj = IonObject(RT.ExternalDatasetAgent,  name='ExternalDatasetAgent1', description='external data agent', handler_module=self.EDA_MOD, handler_class=self.EDA_CLS )
        self.datasetagent_id = self.dams_client.create_external_dataset_agent(external_dataset_agent=datasetagent_obj, external_dataset_model_id=dataset_model_id)

        # Generate the data product and associate it to the ExternalDataset
        pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
        streamdef_id = self.pubsub_client.create_stream_definition(name="temp", parameter_dictionary_id=pdict.identifier)

        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()


        dprod = IonObject(RT.DataProduct,
                          name='slocum_parsed_product',
                          description='parsed slocum product',
                          temporal_domain = tdom,
                          spatial_domain = sdom)

        self.dproduct_id = self.dataproductclient.create_data_product(data_product=dprod,
                                                                      stream_definition_id=streamdef_id)

        self.dams_client.assign_data_product(input_resource_id=ds_id, data_product_id=self.dproduct_id)

        #save the incoming slocum data
        self.dataproductclient.activate_data_product_persistence(self.dproduct_id)
        self.addCleanup(self.dataproductclient.suspend_data_product_persistence, self.dproduct_id)
        
        stream_ids, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_ids[0]

        dataset_id, assn = self.rrclient.find_objects(subject=self.dproduct_id, predicate=PRED.hasDataset, object_type=RT.Dataset, id_only=True)
        self.dataset_id = dataset_id[0]

        pid = self.create_logger('slocum_parsed_product', stream_id )
        self.loggerpids.append(pid)

        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,
            'param_dictionary':pdict.dump(),
            'data_producer_id':dproducer_id, #CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records':20,
            }

        # Create the logger for receiving publications
        #self.create_stream_and_logger(name='slocum',stream_id=stream_id)
        # Create agent config.
        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
コード例 #5
0
class TestExternalDatasetAgentMgmt(IonIntegrationTestCase):

    # DataHandler config
    DVR_CONFIG = {
        'dvr_mod': 'ion.agents.data.handlers.base_data_handler',
        'dvr_cls': 'DummyDataHandler',
    }

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        log.debug("TestExternalDatasetAgentMgmt: started services")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)

#    @unittest.skip('not yet working. fix activate_data_product_persistence()')
#@unittest.skip()

    @unittest.skip('not working')
    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datsetModel_obj = IonObject(RT.ExternalDatasetModel,
                                    name='ExampleDatasetModel',
                                    description="ExampleDatasetModel",
                                    datset_type="FibSeries")
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(
                datsetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" % ex)
        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s",
            str(datasetModel_id))

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(RT.ExternalDatasetAgent,
                                     name='datasetagent007',
                                     description="datasetagent007",
                                     handler_module=EDA_MOD,
                                     handler_class=EDA_CLS)
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(
                datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" % ex)
        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s",
            str(datasetAgent_id))

        # Create ExternalDataset
        log.debug(
            'TestExternalDatasetAgentMgmt: Create external dataset resource ')
        extDataset_obj = IonObject(RT.ExternalDataset,
                                   name='ExtDataset',
                                   description="ExtDataset")
        try:
            extDataset_id = self.damsclient.create_external_dataset(
                extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" %
                      ex)

        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ",
            str(extDataset_id))

        #register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(
            extDataset_id)

        # create a stream definition for the data from the ctd simulator

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        log.debug(
            "TestExternalDatasetAgentMgmt: new Stream Definition id = %s",
            str(ctd_stream_def_id))

        log.debug(
            "TestExternalDatasetAgentMgmt: Creating new data product with a stream definition"
        )
        dp_obj = IonObject(RT.DataProduct,
                           name='eoi dataset data',
                           description=' stream test')

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')

        data_product_id1 = self.dpclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s",
                  str(data_product_id1))

        self.damsclient.assign_data_product(input_resource_id=extDataset_id,
                                            data_product_id=data_product_id1)

        #todo fix the problem here....
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s",
                  str(stream_ids))
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set('data', 'external_data')

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id':
            stream_id,  #TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
            'data_producer_id': dproducer_id,
            #            'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id
            'taxonomy': tx.dump(),  #TODO: Currently does not support sets
            'max_records': 4,
        }

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            'driver_config': self.DVR_CONFIG,
            'stream_config': self._stream_config,
            'agent': {
                'resource_id': EDA_RESOURCE_ID
            },
            'test_mode': True
        }

        extDatasetAgentInstance_obj = IonObject(
            RT.ExternalDatasetAgentInstance,
            name='DatasetAgentInstance',
            description="DatasetAgentInstance",
            dataset_driver_config=self.DVR_CONFIG,
            dataset_agent_config=agent_config)
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(
            external_dataset_agent_instance=extDatasetAgentInstance_obj,
            external_dataset_agent_id=datasetAgent_id,
            external_dataset_id=extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s",
            str(extDatasetAgentInstance_obj))
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s",
            str(extDatasetAgentInstance_id))

        #Check that the instance is currently not active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(
            extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ",
            str(id), str(active))

        self.damsclient.start_external_dataset_agent_instance(
            extDatasetAgentInstance_id)

        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(
            extDatasetAgentInstance_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s",
            str(dataset_agent_instance_obj))

        # now the instance process should be active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(
            extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ",
            str(id), str(active))

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id,
                                               process=FakeProcess())
        print 'TestExternalDatasetAgentMgmt: got ia client %s', self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset client %s",
                  str(self._dsa_client))

        #        cmd=AgentCommand(command='initialize')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='go_active')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='run')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        log.info('Send an unconstrained request for data (\'new data\')')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        cmd = AgentCommand(command='reset')
        #        _ = self._dsa_client.execute_agent(cmd)
        #        cmd = AgentCommand(command='get_current_state')
        #        retval = self._dsa_client.execute_agent(cmd)
        #        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='resume')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='reset')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.damsclient.stop_external_dataset_agent_instance(
            extDatasetAgentInstance_id)

    def test_dataset_agent_prepare_support(self):

        eda_sup = self.damsclient.prepare_external_dataset_agent_support()

        eda_obj = IonObject(RT.ExternalDatasetAgent,
                            name="ExternalDatasetAgent")
        eda_id = self.damsclient.create_external_dataset_agent(eda_obj)

        eda_sup = self.damsclient.prepare_external_dataset_agent_support(
            external_dataset_agent_id=eda_id)

        edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support(
        )

        edai_obj = IonObject(RT.ExternalDatasetAgentInstance,
                             name="ExternalDatasetAgentInstance")
        edai_id = self.damsclient.create_external_dataset_agent_instance(
            edai_obj)

        edai_sup = self.damsclient.prepare_external_dataset_agent_instance_support(
            external_dataset_agent_instance_id=edai_id)
コード例 #6
0
class TestPreloadThenLoadDataset(IonIntegrationTestCase):
    """ Uses the preload system to define the ExternalDataset and related resources,
        then invokes services to perform the load
    """

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        config = dict(op="load", scenario="NOSE", attachments="res/preload/r2_ioc/attachments")
        self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=config)
        self.pubsub = PubsubManagementServiceClient()
        self.dams = DataAcquisitionManagementServiceClient()

    @unittest.skip("depricated test, now in mi repo")
    def test_use_case(self):
        # setUp() has already started the container and performed the preload
#        self.assert_dataset_loaded('Test External CTD Dataset') # make sure we have the ExternalDataset resources
        self.assert_dataset_loaded('Unit Test SMB37')           # association changed -- now use device name
        self.do_listen_for_incoming()                           # listen for any data being received from the dataset
        self.do_read_dataset()                                  # call services to load dataset
        self.assert_data_received()                             # check that data was received as expected
        self.do_shutdown()

    def assert_dataset_loaded(self, name):
        rr = self.container.resource_registry
#        self.external_dataset = self.find_object_by_name(name, RT.ExternalDataset)
        devs, _ = rr.find_resources(RT.InstrumentDevice, name=name, id_only=False)
        self.assertEquals(len(devs), 1)
        self.device = devs[0]
        obj,_ = rr.find_objects(subject=self.device._id, predicate=PRED.hasAgentInstance, object_type=RT.ExternalDatasetAgentInstance)
        self.agent_instance = obj[0]
        obj,_ = rr.find_objects(object_type=RT.ExternalDatasetAgent, predicate=PRED.hasAgentDefinition, subject=self.agent_instance._id)
        self.agent = obj[0]

        driver_cfg = self.agent_instance.driver_config
        #stream_definition_id = driver_cfg['dh_cfg']['stream_def'] if 'dh_cfg' in driver_cfg else driver_cfg['stream_def']
        #self.stream_definition = rr.read(stream_definition_id)

        self.data_product = rr.read_object(subject=self.device._id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct)

        self.dataset_id = rr.read_object(subject=self.data_product._id, predicate=PRED.hasDataset, object_type=RT.Dataset, id_only=True)

        ids,_ = rr.find_objects(subject=self.data_product._id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        self.stream_id = ids[0]
        self.route = self.pubsub.read_stream_route(self.stream_id)

    def do_listen_for_incoming(self):
        subscription_id = self.pubsub.create_subscription('validator', data_product_ids=[self.data_product._id])
        self.addCleanup(self.pubsub.delete_subscription, subscription_id)

        self.granule_capture = []
        self.granule_count = 0
        def on_granule(msg, route, stream_id):
            self.granule_count += 1
            if self.granule_count < 5:
                self.granule_capture.append(msg)
        validator = StandaloneStreamSubscriber('validator', callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub.deactivate_subscription, subscription_id)

        self.dataset_modified = Event()
        def cb2(*args, **kwargs):
            self.dataset_modified.set()
            # TODO: event isn't using the ExternalDataset, but a different ID for a Dataset
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb2, origin=self.dataset_id)
        es.start()
        self.addCleanup(es.stop)

    def do_read_dataset(self):
        self.dams.start_external_dataset_agent_instance(self.agent_instance._id)
        #
        # should i wait for process (above) to start
        # before launching client (below)?
        #
        self.client = None
        end = time.time() + MAX_AGENT_START_TIME
        while not self.client and time.time() < end:
            try:
                self.client = ResourceAgentClient(self.device._id, process=FakeProcess())
            except NotFound:
                time.sleep(2)
        if not self.client:
            self.fail(msg='external dataset agent process did not start in %d seconds' % MAX_AGENT_START_TIME)
        self.client.execute_agent(AgentCommand(command=ResourceAgentEvent.INITIALIZE))
        self.client.execute_agent(AgentCommand(command=ResourceAgentEvent.GO_ACTIVE))
        self.client.execute_agent(AgentCommand(command=ResourceAgentEvent.RUN))
        self.client.execute_resource(command=AgentCommand(command=DriverEvent.START_AUTOSAMPLE))

    def assert_data_received(self):

        #let it go for up to 120 seconds, then stop the agent and reset it
        if not self.dataset_modified.is_set():
            self.dataset_modified.wait(30)
        self.assertTrue(self.granule_count > 2, msg='granule count = %d'%self.granule_count)

        rdt = RecordDictionaryTool.load_from_granule(self.granule_capture[0])
        self.assertAlmostEqual(0, rdt['oxygen'][0], delta=0.01)
        self.assertAlmostEqual(309.77, rdt['pressure'][0], delta=0.01)
        self.assertAlmostEqual(37.9848, rdt['conductivity'][0], delta=0.01)
        self.assertAlmostEqual(9.5163, rdt['temp'][0], delta=0.01)
        self.assertAlmostEqual(3527207897.0, rdt['time'][0], delta=1)

    def do_shutdown(self):
        self.dams.stop_external_dataset_agent_instance(self.agent_instance._id)
コード例 #7
0
class TestPreloadThenLoadDataset(IonIntegrationTestCase):
    """ replicates the TestHypm_WPF_CTD test (same handler/parser/data file)
        but uses the preload system to define the ExternalDataset and related resources,
        then invokes services to perform the load
    """

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        config = dict(op="load", scenario="BETA,NOSE", attachments="res/preload/r2_ioc/attachments")
        self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=config)
        self.pubsub = PubsubManagementServiceClient()
        self.dams = DataAcquisitionManagementServiceClient()

    def find_object_by_name(self, name, resource_type):
        objects,_ = self.container.resource_registry.find_resources(resource_type)
        self.assertTrue(len(objects) >= 1)
#        filtered_objs = [obj for obj in objects if obj.name == name]
        filtered_objs = []
        for obj in objects:
            if obj.name==name:
                filtered_objs.append(obj)
        self.assertEquals(len(filtered_objs), 1, msg='Found %d objects with name %s'%(len(filtered_objs),name))
        return filtered_objs[0]

    def test_use_case(self):
        # setUp() has already started the container and performed the preload
#        self.assert_dataset_loaded('Test External CTD Dataset') # make sure we have the ExternalDataset resources
        self.assert_dataset_loaded('Unit Test SMB37')           # association changed -- now use device name
        self.do_listen_for_incoming()                           # listen for any data being received from the dataset
        self.do_read_dataset()                                  # call services to load dataset
        self.assert_data_received()                             # check that data was received as expected
        self.do_shutdown()

    def assert_dataset_loaded(self, name):
#        self.external_dataset = self.find_object_by_name(name, RT.ExternalDataset)
        self.device = self.find_object_by_name(name, RT.InstrumentDevice)
        rr = self.container.resource_registry
        obj,_ = rr.find_objects(subject=self.device._id, predicate=PRED.hasAgentInstance, object_type=RT.ExternalDatasetAgentInstance)
        self.agent_instance = obj[0]
        obj,_ = rr.find_objects(object_type=RT.ExternalDatasetAgent, predicate=PRED.hasAgentDefinition, subject=self.agent_instance._id)
        self.agent = obj[0]
        stream_definition_id = self.agent_instance.dataset_driver_config['dh_cfg']['stream_def'] if 'dh_cfg' in self.agent_instance.dataset_driver_config else self.agent_instance.dataset_driver_config['stream_def']
        self.stream_definition = rr.read(stream_definition_id)
#        data_producer_id = self.agent_instance.dataset_driver_config['dh_cfg']['data_producer_id'] if 'dh_cfg' in self.agent_instance.dataset_driver_config else self.agent_instance.dataset_driver_config['data_producer_id']
#        self.data_producer = rr.read(data_producer_id) #subject="", predicate="", object_type="", assoc="", id_only=False)
#        self.data_product = rr.read_object(object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, subject=self.external_dataset._id)
        self.data_product = rr.read_object(object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, subject=self.device._id)
        ids,_ = rr.find_objects(self.data_product._id, PRED.hasStream, RT.Stream, id_only=True)
        self.stream_id = ids[0]
        self.route = self.pubsub.read_stream_route(self.stream_id)

    def do_listen_for_incoming(self):
        subscription_id = self.pubsub.create_subscription('validator', data_product_ids=[self.data_product._id])
        self.addCleanup(self.pubsub.delete_subscription, subscription_id)

        self.granule_capture = []
        self.granule_count = 0
        def on_granule(msg, route, stream_id):
            self.granule_count += 1
            if self.granule_count<5:
                self.granule_capture.append(msg)
        validator = StandaloneStreamSubscriber('validator', callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub.deactivate_subscription, subscription_id)

        self.dataset_modified = Event()
        def cb2(*args, **kwargs):
            self.dataset_modified.set()
            # TODO: event isn't using the ExternalDataset, but a different ID for a Dataset
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb2, origin=self.device._id)
        es.start()
        self.addCleanup(es.stop)

    def do_read_dataset(self):

        self.dams.start_external_dataset_agent_instance(self.agent_instance._id)
        #
        # should i wait for process (above) to start
        # before launching client (below)?
        #
        self.client = ResourceAgentClient(self.device._id, process=FakeProcess())
        self.client.execute_agent(AgentCommand(command=ResourceAgentEvent.INITIALIZE))
        self.client.execute_agent(AgentCommand(command=ResourceAgentEvent.GO_ACTIVE))
        self.client.execute_agent(AgentCommand(command=ResourceAgentEvent.RUN))
        self.client.execute_resource(command=AgentCommand(command=DriverEvent.START_AUTOSAMPLE))

    def assert_data_received(self):

        #let it go for up to 120 seconds, then stop the agent and reset it
        if not self.dataset_modified.is_set():
            self.dataset_modified.wait(30)
        self.assertTrue(self.granule_count>2, msg='granule count = %d'%self.granule_count)

        rdt = RecordDictionaryTool.load_from_granule(self.granule_capture[0])
        self.assertAlmostEqual(0, rdt['oxygen'][0], delta=0.01)
        self.assertAlmostEqual(309.77, rdt['pressure'][0], delta=0.01)
        self.assertAlmostEqual(37.9848, rdt['conductivity'][0], delta=0.01)
        self.assertAlmostEqual(9.5163, rdt['temp'][0], delta=0.01)
        self.assertAlmostEqual(1318219097, rdt['time'][0], delta=1)

    def do_shutdown(self):
        self.dams.stop_external_dataset_agent_instance(self.agent_instance._id)
コード例 #8
0
class DatasetAgentTestCase(IonIntegrationTestCase):
    """
    Base class for all coi dataset agent end to end tests
    """
    test_config = DatasetAgentTestConfig()

    def setUp(self, deploy_file=DEPLOY_FILE):

        """
        Start container.
        Start deploy services.
        Define agent config, start agent.
        Start agent client.
        """
        self._dsa_client = None

        # Ensure we have a good test configuration
        self.test_config.verify()

        # Start container.
        log.info('Staring capability container.')
        self._start_container()

        # Bring up services in a deploy file (no need to message)
        log.info('Staring deploy services. %s', deploy_file)
        self.container.start_rel_from_url(DEPLOY_FILE)

        # Load instrument specific parameters
        log.info('Loading additional scenarios')
        self._load_params()

        # Start data subscribers
        self._build_stream_config()
        self._start_data_subscribers()

        # Start a resource agent client to talk with the instrument agent.
        log.info('starting DSA process')
        self._dsa_client = self._start_dataset_agent_process()
        self.addCleanup(self.assert_reset)
        log.info('test setup complete')

    ###
    #   Test/Agent Startup Helpers
    ###
    def _load_params(self):
        """
        Load specific instrument specific parameters from preload
        """
        scenario = None
        categories = None

        if PRELOAD_CATEGORIES:
            categories = ",".join(PRELOAD_CATEGORIES)

        # load_parameter_scenarios
        if PRELOAD_SCENARIO:
            scenario = PRELOAD_SCENARIO
        else:
            log.warn("No common preload defined.  Was this intentional?")

        if self.test_config.preload_scenario:
            if scenario:
                scenario = "%s,%s" % (scenario, self.test_config.preload_scenario)
            else:
                scenario = self.test_config.preload_scenario
        else:
            log.warn("No DSA specific preload defined.  Was this intentional?")

        log.debug("doing preload now: %s", scenario)
        if scenario:
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                scenario=scenario,
                path="master",
                categories=categories,
                clearcols="owner_id,org_ids",
                assets="res/preload/r2_ioc/ooi_assets",
                parseooi="True",
            ))

    def _start_dataset_agent_process(self):
        # Create agent config.
        name = self.test_config.instrument_device_name
        rr = self.container.resource_registry

        log.debug("Start dataset agent process for instrument device: %s", name)
        objects,_ = rr.find_resources(RT.InstrumentDevice)
        log.debug("Found Instrument Devices: %s", objects)

        filtered_objs = [obj for obj in objects if obj.name == name]
        if (filtered_objs) == []:
            raise ConfigNotFound("No appropriate InstrumentDevice objects loaded")

        instrument_device = filtered_objs[0]
        log.trace("Found instrument device: %s", instrument_device)

        dsa_instance = rr.read_object(subject=instrument_device._id,
                                     predicate=PRED.hasAgentInstance,
                                     object_type=RT.ExternalDatasetAgentInstance)

        log.debug("dsa_instance found: %s", dsa_instance)
        self._driver_config = dsa_instance.driver_config

        self.clear_sample_data()

        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        proc_id = self.damsclient.start_external_dataset_agent_instance(dsa_instance._id)
        client = ResourceAgentClient(instrument_device._id, process=FakeProcess())

        return client

    ###
    #   Data file helpers
    ###

    def _get_source_data_file(self, filename):
        """
        Search for a sample data file, first check the driver resource directory
        then just use the filename as a path.  If the file doesn't exists
        raise an exception
        @param filename name or path of the file to search for
        @return full path to the found data file
        @raise IonException if the file isn't found
        """
        resource_dir = self.test_config.test_resource_dir
        source_path = os.path.join(resource_dir, filename)

        log.debug("Search for resource file (%s) in %s", filename, resource_dir)
        if os.path.isfile(source_path):
            log.debug("Found %s in resource directory", filename)
            return source_path

        log.debug("Search for resource file (%s) in current directory", filename)
        if os.path.isfile(filename):
            log.debug("Found %s in the current directory", filename)
            return filename

        raise IonException("Data file %s does not exist", filename)

    def create_data_dir(self):
        """
        Verify the test data directory is created and exists.  Return the path to
        the directory.
        @return: path to data directory
        @raise: ConfigNotFound no harvester config
        @raise: IonException if data_dir exists, but not a directory
        """
        startup_config = self._driver_config.get('startup_config')
        if not startup_config:
            raise ConfigNotFound("Driver config missing 'startup_config'")

        harvester_config = startup_config.get('harvester')
        if not harvester_config:
            raise ConfigNotFound("Startup config missing 'harvester' config")

        data_dir = harvester_config.get("directory")
        if not data_dir:
            raise ConfigNotFound("Harvester config missing 'directory'")

        if not os.path.exists(data_dir):
            log.debug("Creating data dir: %s", data_dir)
            os.makedirs(data_dir)

        elif not os.path.isdir(data_dir):
            raise IonException("'data_dir' is not a directory")

        return data_dir

    def clear_sample_data(self):
        """
        Remove all files from the sample data directory
        """
        data_dir = self.create_data_dir()

        log.debug("Clean all data from %s", data_dir)
        self.remove_all_files(data_dir)

    def create_sample_data(self, filename, dest_filename=None):
        """
        Search for a data file in the driver resource directory and if the file
        is not found there then search using the filename directly.  Then copy
        the file to the test data directory.

        If a dest_filename is supplied it will be renamed in the destination
        directory.
        @param: filename - filename or path to a data file to copy
        @param: dest_filename - name of the file when copied. default to filename
        """
        data_dir = self.create_data_dir()
        source_path = self._get_source_data_file(filename)

        log.debug("DIR: %s", data_dir)
        if dest_filename is None:
            dest_path = os.path.join(data_dir, os.path.basename(source_path))
        else:
            dest_path = os.path.join(data_dir, dest_filename)

        log.debug("Creating data file src: %s, dest: %s", source_path, dest_path)
        shutil.copy2(source_path, dest_path)

    def remove_all_files(self, dir_name):
        """
        Remove all files from a directory.  Raise an exception if the directory contains something
        other than files.
        @param dir_name directory path to remove files.
        @raise RuntimeError if the directory contains anything except files.
        """
        for file_name in os.listdir(dir_name):
            file_path = os.path.join(dir_name, file_name)
            if not os.path.isfile(file_path):
                raise RuntimeError("%s is not a file", file_path)

        for file_name in os.listdir(dir_name):
            file_path = os.path.join(dir_name, file_name)
            os.unlink(file_path)

    ###############################################################################
    # Event helpers.
    ###############################################################################

    def _start_event_subscriber(self, type='ResourceAgentEvent', count=0):
        """
        Start a subscriber to the instrument agent events.
        @param type The type of event to catch.
        @count Trigger the async event result when events received reaches this.
        """
        def consume_event(*args, **kwargs):
            log.info('Test recieved ION event: args=%s, kwargs=%s, event=%s.',
                     str(args), str(kwargs), str(args[0]))
            self._events_received.append(args[0])
            if self._event_count > 0 and \
                self._event_count == len(self._events_received):
                self._async_event_result.set()

        # Event array and async event result.
        self._event_count = count
        self._events_received = []
        self._async_event_result = AsyncResult()

        self._event_subscriber = EventSubscriber(
            event_type=type, callback=consume_event,
            origin=IA_RESOURCE_ID)
        self._event_subscriber.start()
        self._event_subscriber._ready_event.wait(timeout=5)

    def _stop_event_subscriber(self):
        """
        Stop event subscribers on cleanup.
        """
        self._event_subscriber.stop()
        self._event_subscriber = None

    ###############################################################################
    # Data stream helpers.
    ###############################################################################

    def _build_stream_config(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        dataset_management = DatasetManagementServiceClient()

        encoder = IonObjectSerializer()

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}

        stream_name = 'ctdpf_parsed'
        param_dict_name = 'ctdpf_parsed'
        pd_id = dataset_management.read_parameter_dictionary_by_name(param_dict_name, id_only=True)
        stream_def_id = pubsub_client.create_stream_definition(name=stream_name, parameter_dictionary_id=pd_id)
        stream_def = pubsub_client.read_stream_definition(stream_def_id)
        stream_def_dict = encoder.serialize(stream_def)
        pd = stream_def.parameter_dictionary
        stream_id, stream_route = pubsub_client.create_stream(name=stream_name,
                                                exchange_point='science_data',
                                                stream_definition_id=stream_def_id)
        stream_config = dict(routing_key=stream_route.routing_key,
                                 exchange_point=stream_route.exchange_point,
                                 stream_id=stream_id,
                                 parameter_dictionary=pd,
                                 stream_def_dict=stream_def_dict)
        self._stream_config[stream_name] = stream_config

    def _start_data_subscribers(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        self._raw_samples_received = []
        self._async_sample_result = AsyncResult()
        self._async_raw_sample_result = AsyncResult()

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info('Received parsed data on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._samples_received.append(message)

        from pyon.util.containers import create_unique_identifier

        stream_name = 'ctdpf_parsed'
        parsed_config = self._stream_config[stream_name]
        stream_id = parsed_config['stream_id']
        exchange_name = create_unique_identifier("%s_queue" %
                    stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)

    def _purge_queue(self, queue):
        xn = self.container.ex_manager.create_xn_queue(queue)
        xn.purge()

    def _stop_data_subscribers(self):
        for subscriber in self._data_subscribers:
            pubsub_client = PubsubManagementServiceClient()
            if hasattr(subscriber,'subscription_id'):
                try:
                    pubsub_client.deactivate_subscription(subscriber.subscription_id)
                except:
                    pass
                pubsub_client.delete_subscription(subscriber.subscription_id)
            subscriber.stop()

    ###
    #   Common assert methods
    ###

    def assert_initialize(self, final_state = ResourceAgentState.STREAMING):
        '''
        Walk through DSA states to get to streaming mode from uninitialized
        '''
        state = self._dsa_client.get_agent_state()

        with self.assertRaises(Conflict):
            res_state = self._dsa_client.get_resource_state()

        log.debug("Initialize DataSet agent")
        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.INACTIVE)
        log.info("Sent INITIALIZE; DSA state = %s", state)

        log.debug("DataSet agent go active")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent GO_ACTIVE; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.IDLE)

        log.debug("DataSet agent run")
        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent RUN; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.COMMAND)

        if final_state == ResourceAgentState.STREAMING:
            self.assert_start_sampling()

    def assert_stop_sampling(self):
        '''
        transition to command.  Must be called from streaming
        '''
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.STREAMING)

        log.debug("DataSet agent start sampling")
        cmd = AgentCommand(command='DRIVER_EVENT_STOP_AUTOSAMPLE')
        retval = self._dsa_client.execute_resource(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent START SAMPLING; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.COMMAND)

    def assert_start_sampling(self):
        '''
        transition to sampling.  Must be called from command
        '''
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        log.debug("DataSet agent start sampling")
        cmd = AgentCommand(command='DRIVER_EVENT_START_AUTOSAMPLE')
        retval = self._dsa_client.execute_resource(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent START SAMPLING; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.STREAMING)

    def assert_reset(self):
        '''
        Put the instrument back in uninitialized
        '''
        if self._dsa_client is None:
            return

        state = self._dsa_client.get_agent_state()

        if state != ResourceAgentState.UNINITIALIZED:
            cmd = AgentCommand(command=ResourceAgentEvent.RESET)
            retval = self._dsa_client.execute_agent(cmd)
            state = self._dsa_client.get_agent_state()

        self.assertEqual(state, ResourceAgentState.UNINITIALIZED)
コード例 #9
0
class DatasetAgentTestCase(IonIntegrationTestCase):
    """
    Base class for all dataset agent end to end tests
    """
    test_config = DatasetAgentTestConfig()

    def setUp(self, deploy_file=DEPLOY_FILE):
        """
        Start container.
        Start deploy services.
        Define agent config, start agent.
        Start agent client.
        """
        self._dsa_client = None
        self.dams = DataAcquisitionManagementServiceClient()

        # Ensure we have a good test configuration
        self.test_config.verify()

        # Start container.
        log.info('Staring capability container.')
        self._start_container()
        self.rr = self.container.resource_registry

        # Bring up services in a deploy file (no need to message)
        log.info('Starting deploy services. %s', deploy_file)
        self.container.start_rel_from_url(DEPLOY_FILE)

        # Load instrument specific parameters
        log.info('Preload test scenarios')
        self._load_params()

        # Start a resource agent client to talk with the instrument agent.
        log.info('Starting DSA process')
        self._dsa_client = self._start_dataset_agent_process()
        log.debug("Client created: %s", type(self._dsa_client))
        self.addCleanup(self._stop_dataset_agent_process)
        log.info('test setup complete')

        # Start data subscribers
        self._start_data_subscribers()
        self.addCleanup(self._stop_data_subscribers)

    ###
    #   Test/Agent Startup Helpers
    ###
    def _load_params(self):
        """
        Do a second round of preload with instrument specific scenarios
        """
        scenario = None
        categories = None

        if PRELOAD_CATEGORIES:
            categories = ",".join(PRELOAD_CATEGORIES)

        # load_parameter_scenarios
        if PRELOAD_SCENARIO:
            scenario = PRELOAD_SCENARIO
        else:
            log.warn("No common preload defined.  Was this intentional?")

        if self.test_config.preload_scenario:
            scenario = "%s,%s" % (scenario, self.test_config.preload_scenario) if scenario else self.test_config.preload_scenario
        else:
            log.warn("No DSA specific preload defined.  Was this intentional?")

        if scenario:
            preload_config=dict(
                op="load",
                scenario=scenario,
                #path="master",
                path=TESTED_DOC,
                categories=categories,
                clearcols="owner_id,org_ids",
                #assets="res/preload/r2_ioc/ooi_assets",
                #parseooi="True",
            )
            log.debug("Starting preload now: config=%s", preload_config)
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", preload_config)

    def _start_dataset_agent_process(self):
        """
        Launch the agent process and store the configuration.  Tried
        to emulate the same process used by import_data.py
        """
        instrument_device, dsa_instance = self._get_dsa_instance()
        self._driver_config = dsa_instance.driver_config

        self._update_dsa_config(dsa_instance)
        self._update_harvester_config(dsa_instance)

        self._dsa_instance = dsa_instance
        self.clear_sample_data()

        # Return a resource agent client
        return self._get_dsa_client(instrument_device, dsa_instance)

    def _stop_dataset_agent_process(self):
        """
        Stop the dataset agent instance
        """
        self.assert_reset()
        self.dams.stop_external_dataset_agent_instance(self._dsa_instance._id)

    def _get_dsa_instance(self):
        """
        Find the dsa instance in preload and return an instance of that object
        """
        name = self.test_config.instrument_device_name

        log.debug("Start dataset agent process for instrument device: %s", name)
        objects,_ = self.rr.find_resources(RT.InstrumentDevice, name=name)
        log.debug("Found Instrument Devices: %s", objects)
        if not objects:
            raise ConfigNotFound("No appropriate InstrumentDevice objects loaded")

        instrument_device = objects[0]
        log.trace("Found instrument device: %s", instrument_device)

        dsa_instance = self.rr.read_object(subject=instrument_device._id,
                                     predicate=PRED.hasAgentInstance,
                                     object_type=RT.ExternalDatasetAgentInstance)

        log.info("dsa_instance found: %s", dsa_instance)

        return instrument_device, dsa_instance

    def _update_harvester_config(self, dsa_instance):
        """
        Update the harvester config such that we change the directory to something
        we have write permissions.
        """
        log.info("dsa agent instance: %s", dsa_instance)
        driver_config = dsa_instance.driver_config

        log.info("dsa agent driver config: %s", driver_config)
        driver_config['startup_config']['harvester']['directory'] = self.test_config.data_dir

        log.info("updated driver config: %s", driver_config)
        dsa_instance.driver_config = driver_config

        self.rr.update(dsa_instance)

    def _update_dsa_config(self, dsa_instance):
        """
        Update the dsa configuration prior to loading the agent.  This is where we can
        alter production configurations for use in a controlled test environment.
        """
        dsa_obj = self.rr.read_object(
            object_type=RT.ExternalDatasetAgent, predicate=PRED.hasAgentDefinition, subject=dsa_instance._id, id_only=False)

        log.info("dsa agent definition found: %s", dsa_obj)

        # If we don't want to load from an egg then we need to
        # alter the driver config read from preload
        if self.test_config.mi_repo is not None:
            dsa_obj.driver_uri = None
            # Strip the custom namespace
            dsa_obj.driver_module = ".".join(dsa_obj.driver_module.split('.')[1:])

            log.info("saving new dsa agent config: %s", dsa_obj)
            self.rr.update(dsa_obj)

            if not self.test_config.mi_repo in sys.path: sys.path.insert(0, self.test_config.mi_repo)

            log.debug("Driver module: %s", dsa_obj.driver_module)
            log.debug("MI Repo: %s", self.test_config.mi_repo)
            log.trace("Sys Path: %s", sys.path)

    def _get_dsa_client(self, instrument_device, dsa_instance):
        """
        Launch the agent and return a client
        """
        fake_process = FakeProcess()
        fake_process.container = self.container

        clients = DataAcquisitionManagementServiceDependentClients(fake_process)
        config_builder = ExternalDatasetAgentConfigurationBuilder(clients)

        try:
            config_builder.set_agent_instance_object(dsa_instance)
            self.agent_config = config_builder.prepare()
        except Exception as e:
            log.error('failed to launch: %s', e, exc_info=True)
            raise ServerError('failed to launch')

        self._dsa_pid = self.dams.start_external_dataset_agent_instance(dsa_instance._id)
        log.debug("_get_dsa_client CFG")
        return ResourceAgentClient(instrument_device._id, process=FakeProcess())

        dispatcher = ProcessDispatcherServiceClient()
        launcher = AgentLauncher(dispatcher)

        log.debug("Launching agent process!")

        self._dsa_pid = launcher.launch(self.agent_config, config_builder._get_process_definition()._id)
        if not self._dsa_pid:
            raise ServerError("Launched external dataset agent instance but no process_id")
        config_builder.record_launch_parameters(self.agent_config)

        launcher.await_launch(10.0)
        return ResourceAgentClient(instrument_device._id, process=FakeProcess())

    def _get_dsa_object_state(self):
        state, _id = self.container.state_repository.get_state(self._dsa_pid)
        log.debug("agent_state (%s): %s", self._dsa_pid, state)

        driver_state = state.get(DSA_STATE_KEY)
        log.debug("driver_state (%s): %s", self._dsa_pid, driver_state)

        return driver_state

    ###
    #   Data file helpers
    ###

    def _get_source_data_file(self, filename):
        """
        Search for a sample data file, first check the driver resource directory
        then just use the filename as a path.  If the file doesn't exists
        raise an exception
        @param filename name or path of the file to search for
        @return full path to the found data file
        @raise IonException if the file isn't found
        """
        resource_dir = self.test_config.test_resource_dir
        source_path = os.path.join(resource_dir, filename)

        log.debug("Search for resource file (%s) in %s", filename, resource_dir)
        if os.path.isfile(source_path):
            log.debug("Found %s in resource directory", filename)
            return source_path

        log.debug("Search for resource file (%s) in current directory", filename)
        if os.path.isfile(filename):
            log.debug("Found %s in the current directory", filename)
            return filename

        raise IonException("Data file %s does not exist", filename)

    def create_data_dir(self):
        """
        Verify the test data directory is created and exists.  Return the path to
        the directory.
        @return: path to data directory
        @raise: ConfigNotFound no harvester config
        @raise: IonException if data_dir exists, but not a directory
        """
        startup_config = self._driver_config.get('startup_config')
        if not startup_config:
            raise ConfigNotFound("Driver config missing 'startup_config'")

        harvester_config = startup_config.get('harvester')
        if not harvester_config:
            raise ConfigNotFound("Startup config missing 'harvester' config")

        data_dir = harvester_config.get("directory")
        if not data_dir:
            raise ConfigNotFound("Harvester config missing 'directory'")

        if not os.path.exists(data_dir):
            log.debug("Creating data dir: %s", data_dir)
            os.makedirs(data_dir)

        elif not os.path.isdir(data_dir):
            raise IonException("'data_dir' is not a directory")

        return data_dir

    def clear_sample_data(self):
        """
        Remove all files from the sample data directory
        """
        data_dir = self.create_data_dir()

        log.debug("Clean all data from %s", data_dir)
        self.remove_all_files(data_dir)

    def create_sample_data(self, filename, dest_filename=None):
        """
        Search for a data file in the driver resource directory and if the file
        is not found there then search using the filename directly.  Then copy
        the file to the test data directory.

        If a dest_filename is supplied it will be renamed in the destination
        directory.
        @param: filename - filename or path to a data file to copy
        @param: dest_filename - name of the file when copied. default to filename
        """
        data_dir = self.create_data_dir()
        source_path = self._get_source_data_file(filename)

        log.debug("DIR: %s", data_dir)
        if dest_filename is None:
            dest_path = os.path.join(data_dir, os.path.basename(source_path))
        else:
            dest_path = os.path.join(data_dir, dest_filename)

        log.debug("Creating data file src: %s, dest: %s", source_path, dest_path)
        shutil.copy2(source_path, dest_path)

        return dest_path

    def remove_all_files(self, dir_name):
        """
        Remove all files from a directory.  Raise an exception if the directory contains something
        other than files.
        @param dir_name directory path to remove files.
        @raise RuntimeError if the directory contains anything except files.
        """
        for file_name in os.listdir(dir_name):
            file_path = os.path.join(dir_name, file_name)
            if not os.path.isfile(file_path):
                raise RuntimeError("%s is not a file", file_path)

        for file_name in os.listdir(dir_name):
            file_path = os.path.join(dir_name, file_name)
            os.unlink(file_path)

    ###############################################################################
    # Event helpers.
    ###############################################################################

    def _start_event_subscriber(self, type='ResourceAgentEvent', count=0):
        """
        Start a subscriber to the instrument agent events.
        @param type The type of event to catch.
        @count Trigger the async event result when events received reaches this.
        """
        def consume_event(*args, **kwargs):
            log.info('Test recieved ION event: args=%s, kwargs=%s, event=%s.',
                     str(args), str(kwargs), str(args[0]))
            self._events_received.append(args[0])
            if self._event_count > 0 and \
                self._event_count == len(self._events_received):
                self._async_event_result.set()

        # Event array and async event result.
        self._event_count = count
        self._events_received = []
        self._async_event_result = AsyncResult()

        self._event_subscriber = EventSubscriber(
            event_type=type, callback=consume_event,
            origin="IA_RESOURCE_ID") #TODO
        self._event_subscriber.start()
        self._event_subscriber._ready_event.wait(timeout=5)

    def _stop_event_subscriber(self):
        """
        Stop event subscribers on cleanup.
        """
        self._event_subscriber.stop()
        self._event_subscriber = None

    ###############################################################################
    # Data stream helpers.
    ###############################################################################
    def _start_data_subscribers(self):
        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            if self._samples_received.get(stream_id) is None:
                self._samples_received[stream_id] = []

            log.info('Received parsed data on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._samples_received[stream_id].append(message)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = {}
        self._stream_id_map = {}
        stream_config = self.agent_config['stream_config']

        log.info("starting data subscribers")

        for stream_name in stream_config.keys():
            log.debug("Starting data subscriber for stream '%s'", stream_name)
            stream_id = stream_config[stream_name]['stream_id']
            self._stream_id_map[stream_name] = stream_id
            self._start_data_subscriber(stream_config[stream_name], recv_data)

    def _start_data_subscriber(self, config, callback):
        """
        Setup and start a data subscriber
        """
        exchange_point = config['exchange_point']
        stream_id = config['stream_id']

        sub = StandaloneStreamSubscriber(exchange_point, callback)
        sub.start()
        self._data_subscribers.append(sub)

        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        sub_id = pubsub_client.create_subscription(name=exchange_point, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)

    def make_data_product(self, pdict_name, dp_name, available_fields=None):
        self.pubsub_management = PubsubManagementServiceClient()
        if available_fields is None: available_fields = []
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(pdict_name, id_only=True)
        stream_def_id = self.pubsub_management.create_stream_definition('%s stream_def' % dp_name, parameter_dictionary_id=pdict_id, available_fields=available_fields or None)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = DataProduct(name=dp_name)
        dp_obj.temporal_domain = tdom
        dp_obj.spatial_domain = sdom
        data_product_id = self.data_product_management.create_data_product(dp_obj, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        return data_product_id

    def _stop_data_subscribers(self):
        for subscriber in self._data_subscribers:
            pubsub_client = PubsubManagementServiceClient()
            if hasattr(subscriber,'subscription_id'):
                try:
                    pubsub_client.deactivate_subscription(subscriber.subscription_id)
                except:
                    pass
                pubsub_client.delete_subscription(subscriber.subscription_id)
            subscriber.stop()

    def get_samples(self, stream_name, sample_count=1, timeout=30):
        """
        listen on a stream until 'sample_count' samples are read and return
        a list of all samples read.  If the required number of samples aren't
        read then throw an exception.

        Note that this method does not clear the sample queue for the stream.
        This should be done explicitly by the caller.  However, samples that
        are consumed by this method are removed.

        @raise SampleTimeout - if the required number of samples aren't read
        """
        to = gevent.Timeout(timeout)
        to.start()
        done = False
        result = []
        i = 0

        log.debug("Fetch %s sample(s) from stream '%s'" % (sample_count, stream_name))

        stream_id = self._stream_id_map.get(stream_name)
        log.debug("Stream ID Map: %s ", self._stream_id_map)
        self.assertIsNotNone(stream_id, msg="Unable to find stream name '%s'" % stream_name)

        try:
            while(not done):
                if (self._samples_received.has_key(stream_id) and
                   len(self._samples_received.get(stream_id))):
                    log.trace("get_samples() received sample #%d!", i)
                    result.append(self._samples_received[stream_id].pop(0))
                    i += 1

                    if i >= sample_count:
                        done = True

                else:
                    log.debug("No samples in %s. Sleep a bit to wait for the data queue to fill up.", stream_name)
                    gevent.sleep(1)

        except Timeout:
            log.error("Failed to get %d records from %s.  received: %d", sample_count, stream_name, i)
            self.fail("Failed to read samples from stream %s", stream_name)
        finally:
            to.cancel()
            return result

    def remove_sample_dir(self):
        """
        Remove the sample dir and all files
        """
        data_dir = self.create_data_dir()
        self.clear_sample_data()
        os.rmdir(data_dir)

    ###
    #   Common assert methods
    ###

    def assertDictEqual(self, d1, d2, msg=None): # assertEqual uses for dicts
        for k,v1 in d1.iteritems():
            self.assertIn(k, d2, msg)
            v2 = d2[k]
            if(isinstance(v1, collections.Iterable) and
               not isinstance(v1, basestring)):
                self.assertItemsEqual(v1, v2, msg)
            else:
                self.assertEqual(v1, v2, msg)
        return True

    def assert_initialize(self, final_state = ResourceAgentState.STREAMING):
        '''
        Walk through DSA states to get to streaming mode from uninitialized
        '''
        state = self._dsa_client.get_agent_state()

        with self.assertRaises(Conflict):
            res_state = self._dsa_client.get_resource_state()

        log.debug("Initialize DataSet agent")
        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.INACTIVE)
        log.info("Sent INITIALIZE; DSA state = %s", state)

        log.debug("DataSet agent go active")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent GO_ACTIVE; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.IDLE)

        log.debug("DataSet agent run")
        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent RUN; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.COMMAND)

        if final_state == ResourceAgentState.STREAMING:
            self.assert_start_sampling()

    def assert_stop_sampling(self):
        '''
        transition to command.  Must be called from streaming
        '''
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.STREAMING)

        log.debug("DataSet agent start sampling")
        cmd = AgentCommand(command='DRIVER_EVENT_STOP_AUTOSAMPLE')
        retval = self._dsa_client.execute_resource(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent START SAMPLING; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.COMMAND)

    def assert_start_sampling(self):
        '''
        transition to sampling.  Must be called from command
        '''
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        log.debug("DataSet agent start sampling")
        cmd = AgentCommand(command='DRIVER_EVENT_START_AUTOSAMPLE')
        retval = self._dsa_client.execute_resource(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent START SAMPLING; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.STREAMING)

    def assert_reset(self):
        '''
        Put the instrument back in uninitialized
        '''
        if self._dsa_client is None:
            return

        state = self._dsa_client.get_agent_state()

        if state != ResourceAgentState.UNINITIALIZED:
            cmd = AgentCommand(command=ResourceAgentEvent.RESET)
            retval = self._dsa_client.execute_agent(cmd)
            state = self._dsa_client.get_agent_state()

        self.assertEqual(state, ResourceAgentState.UNINITIALIZED)

    def assert_data_values(self, granules, dataset_definition_file):
        """
        Verify granules match the granules defined in the definition file
        """
        rs_file = self._get_source_data_file(dataset_definition_file)
        rs = ResultSet(rs_file)

        self.assertTrue(rs.verify(granules), msg="Failed data validation.  See log for details")

    def assert_sample_queue_size(self, stream_name, size):
        """
        verify a sample queue is the size we expect it to be.
        """
        # Sleep a couple seconds to ensure the
        gevent.sleep(2)

        stream_id = self._stream_id_map.get(stream_name)
        length = 0
        if stream_id in self._samples_received:
            length = len(self._samples_received[stream_id])
        self.assertEqual(length, size, msg="Queue size != expected size (%d != %d)" % (length, size))

    def assert_set_pubrate(self, rate):
        """
        Set the pubrate for the parsed data stream.  Set to 0 for
        no buffering
        """
        self.assertIsInstance(rate, (int, float))
        self.assertGreaterEqual(rate, 0)

        expected_pubrate = {self.test_config.stream_name: rate}

        retval = self._dsa_client.set_agent({'pubrate': expected_pubrate})

        retval = self._dsa_client.get_agent(['pubrate'])
        expected_pubrate_result = {'pubrate': expected_pubrate}
        self.assertEqual(retval, expected_pubrate_result)

    def assert_agent_command(self, command, args=None, timeout=None):
        """
        Verify an agent command
        @param command: driver command to execute
        @param args: kwargs to pass to the agent command object
        """
        cmd = AgentCommand(command=command, kwargs=args)
        retval = self._dsa_client.execute_agent(cmd, timeout=timeout)

    def assert_resource_command(self, command, args=None, timeout=None):
        """
        Verify a resource command
        @param command: driver command to execute
        @param args: kwargs to pass to the agent command object
        """
        cmd = AgentCommand(command=command, kwargs=args)
        retval = self._dsa_client.execute_resource(cmd)

    def assert_schema(self, caps_list):

        dd_list = ['display_name','description']
        ddt_list = ['display_name','description','type']
        ddvt_list = ['display_name','description','visibility','type']
        ddak_list = ['display_name','description','args','kwargs']

        for x in caps_list:
            if isinstance(x,dict):
                x.pop('type_')
                x = IonObject('AgentCapability', **x)

            if x.cap_type == CapabilityType.AGT_CMD:
                keys = x.schema.keys()
                for y in ddak_list:
                    self.assertIn(y, keys)

            elif x.cap_type == CapabilityType.AGT_PAR:
                    if x.name != 'example':
                        keys = x.schema.keys()
                        for y in ddvt_list:
                            self.assertIn(y, keys)

            elif x.cap_type == CapabilityType.RES_CMD:
                keys = x.schema.keys()
                self.assertIn('return',keys)
                self.assertIn('display_name',keys)
                self.assertIn('arguments',keys)
                self.assertIn('timeout',keys)

            elif x.cap_type == CapabilityType.RES_IFACE:
                pass

            elif x.cap_type == CapabilityType.RES_PAR:
                keys = x.schema.keys()
                self.assertIn('get_timeout',keys)
                self.assertIn('set_timeout',keys)
                self.assertIn('direct_access',keys)
                self.assertIn('startup',keys)
                self.assertIn('visibility',keys)

            elif x.cap_type == CapabilityType.AGT_STATES:
                for (k,v) in x.schema.iteritems():
                    keys = v.keys()
                    for y in dd_list:
                        self.assertIn(y, keys)

            elif x.cap_type == CapabilityType.ALERT_DEFS:
                for (k,v) in x.schema.iteritems():
                    keys = v.keys()
                    for y in ddt_list:
                        self.assertIn(y, keys)

            elif x.cap_type == CapabilityType.AGT_CMD_ARGS:
                for (k,v) in x.schema.iteritems():
                    keys = v.keys()
                    for y in ddt_list:
                        self.assertIn(y, keys)

    def assert_agent_capabilities(self):
        """
        Verify capabilities throughout the agent lifecycle
        """
        capabilities = {
            AgentCapabilityType.AGENT_COMMAND: self._common_agent_commands(ResourceAgentState.UNINITIALIZED),
            AgentCapabilityType.AGENT_PARAMETER: self._common_agent_parameters(),
            AgentCapabilityType.RESOURCE_COMMAND: None,
            AgentCapabilityType.RESOURCE_INTERFACE: None,
            AgentCapabilityType.RESOURCE_PARAMETER: None,
        }

        ###
        # DSA State INACTIVE
        ###

        log.debug("Initialize DataSet agent")
        self.assert_agent_command(ResourceAgentEvent.INITIALIZE)
        self.assert_state_change(ResourceAgentState.INACTIVE)
        self.assert_capabilities(capabilities)

        ###
        # DSA State IDLE
        ###

        log.debug("DataSet agent go active")
        capabilities[AgentCapabilityType.AGENT_COMMAND] = self._common_agent_commands(ResourceAgentState.IDLE)
        self.assert_agent_command(ResourceAgentEvent.GO_ACTIVE)
        self.assert_state_change(ResourceAgentState.IDLE)
        self.assert_capabilities(capabilities)

        ###
        # DSA State COMMAND
        ###

        log.debug("DataSet agent run")
        capabilities[AgentCapabilityType.AGENT_COMMAND] = self._common_agent_commands(ResourceAgentState.COMMAND)
        capabilities[AgentCapabilityType.RESOURCE_COMMAND] = ['DRIVER_EVENT_START_AUTOSAMPLE']
        capabilities[AgentCapabilityType.RESOURCE_PARAMETER] = self._common_resource_parameters()
        self.assert_agent_command(ResourceAgentEvent.RUN)
        self.assert_state_change(ResourceAgentState.COMMAND)
        self.assert_capabilities(capabilities)


        ###
        # DSA State STREAMING
        ###
        capabilities[AgentCapabilityType.AGENT_COMMAND] = self._common_agent_commands(ResourceAgentState.STREAMING)
        capabilities[AgentCapabilityType.RESOURCE_COMMAND] = ['DRIVER_EVENT_STOP_AUTOSAMPLE']
        capabilities[AgentCapabilityType.RESOURCE_PARAMETER] = self._common_resource_parameters()
        self.assert_start_sampling()
        self.assert_capabilities(capabilities)


        ###
        # DSA State LOST_CONNECTION
        ###
        capabilities[AgentCapabilityType.AGENT_COMMAND] = self._common_agent_commands(ResourceAgentState.LOST_CONNECTION)
        capabilities[AgentCapabilityType.RESOURCE_COMMAND] = None
        capabilities[AgentCapabilityType.RESOURCE_PARAMETER] = None
        self.assert_agent_command(ResourceAgentEvent.RESET)
        self.assert_state_change(ResourceAgentState.UNINITIALIZED)

        self.remove_sample_dir()
        self.assert_initialize(final_state=ResourceAgentState.COMMAND)
        self.assert_resource_command('DRIVER_EVENT_START_AUTOSAMPLE')
        self.assert_state_change(ResourceAgentState.LOST_CONNECTION, 90)

    def assert_driver_state(self, expected_state=None):
        '''
        verify that expected persisted agent state matches was it actually stored
        @param expected_state dict expected
        '''
        state = self._get_dsa_object_state()

        if expected_state is None:
            self.assertIsNone(expected_state)
        else:
            self.assertEqual(expected_state, state)

    def assert_agent_state_after_restart(self):
        '''
        Restart the agent.  Verify that the agent PID changes. Then verify the new state
        matches the old state.
        '''
        old_pid = self._dsa_pid
        old_state = self._get_dsa_object_state()

        # Start a resource agent client to talk with the instrument agent.
        log.info('Restarting DSA process')
        self._stop_dataset_agent_process()
        self._dsa_client = self._start_dataset_agent_process()
        log.debug("Client created: %s", type(self._dsa_client))
        self.addCleanup(self.assert_reset)

        self.assert_initialize()

        self.assertNotEqual(old_pid, self._dsa_pid)
        self.assertEqual(old_state, self._get_dsa_object_state())

        # Kick it into autosample and give it time for samples to come in, there shouldn't be any
        gevent.sleep(5)

    def assert_capabilities(self, capabilities):
        '''
        Verify that all capabilities are available for a give state

        @todo: Currently resource interface not implemented because it requires
               a submodule update and some of the submodules are in release
               states.  So for now, no resource interfaces

        @param: dictionary of all the different capability types that are
        supposed to be there. i.e.
        {
          agent_command = ['DO_MY_COMMAND'],
          agent_parameter = ['foo'],
          resource_command = None,
          resource_interface = None,
          resource_parameter = None,
        }
        '''
        def sort_capabilities(caps_list):
            '''
            sort a return value into capability buckets.
            @retval agt_cmds, agt_pars, res_cmds, res_iface, res_pars
            '''
            agt_cmds = []
            agt_pars = []
            res_cmds = []
            res_iface = []
            res_pars = []

            if len(caps_list)>0 and isinstance(caps_list[0], AgentCapability):
                agt_cmds = [x.name for x in caps_list if x.cap_type==CapabilityType.AGT_CMD]
                agt_pars = [x.name for x in caps_list if x.cap_type==CapabilityType.AGT_PAR]
                res_cmds = [x.name for x in caps_list if x.cap_type==CapabilityType.RES_CMD]
                #res_iface = [x.name for x in caps_list if x.cap_type==CapabilityType.RES_IFACE]
                res_pars = [x.name for x in caps_list if x.cap_type==CapabilityType.RES_PAR]

            elif len(caps_list)>0 and isinstance(caps_list[0], dict):
                agt_cmds = [x['name'] for x in caps_list if x['cap_type']==CapabilityType.AGT_CMD]
                agt_pars = [x['name'] for x in caps_list if x['cap_type']==CapabilityType.AGT_PAR]
                res_cmds = [x['name'] for x in caps_list if x['cap_type']==CapabilityType.RES_CMD]
                #res_iface = [x['name'] for x in caps_list if x['cap_type']==CapabilityType.RES_IFACE]
                res_pars = [x['name'] for x in caps_list if x['cap_type']==CapabilityType.RES_PAR]

            agt_cmds.sort()
            agt_pars.sort()
            res_cmds.sort()
            res_iface.sort()
            res_pars.sort()

            return agt_cmds, agt_pars, res_cmds, res_iface, res_pars

        if not capabilities.get(AgentCapabilityType.AGENT_COMMAND):
            capabilities[AgentCapabilityType.AGENT_COMMAND] = []
        if not capabilities.get(AgentCapabilityType.AGENT_PARAMETER):
            capabilities[AgentCapabilityType.AGENT_PARAMETER] = []
        if not capabilities.get(AgentCapabilityType.RESOURCE_COMMAND):
            capabilities[AgentCapabilityType.RESOURCE_COMMAND] = []
        if not capabilities.get(AgentCapabilityType.RESOURCE_INTERFACE):
            capabilities[AgentCapabilityType.RESOURCE_INTERFACE] = []
        if not capabilities.get(AgentCapabilityType.RESOURCE_PARAMETER):
            capabilities[AgentCapabilityType.RESOURCE_PARAMETER] = []


        expected_agent_cmd = capabilities.get(AgentCapabilityType.AGENT_COMMAND)
        expected_agent_cmd.sort()
        expected_agent_param = self._common_agent_parameters()
        expected_agent_param.sort()
        expected_res_cmd = capabilities.get(AgentCapabilityType.RESOURCE_COMMAND)
        expected_res_cmd.sort()
        expected_res_param = capabilities.get(AgentCapabilityType.RESOURCE_PARAMETER)
        expected_res_param.sort()
        expected_res_int = capabilities.get(AgentCapabilityType.RESOURCE_INTERFACE)
        expected_res_int.sort()

        # go get the active capabilities
        retval = self._dsa_client.get_capabilities()
        agt_cmds, agt_pars, res_cmds, res_iface, res_pars = sort_capabilities(retval)
        self.assert_schema(retval)

        log.debug("Agent Commands: %s ", str(agt_cmds))
        log.debug("Compared to: %s", expected_agent_cmd)
        log.debug("Agent Parameters: %s ", str(agt_pars))
        log.debug("Compared to: %s", expected_agent_param)
        log.debug("Resource Commands: %s ", str(res_cmds))
        log.debug("Compared to: %s", expected_res_cmd)
        log.debug("Resource Interface: %s ", str(res_iface))
        log.debug("Compared to: %s", expected_res_int)
        log.debug("Resource Parameter: %s ", str(res_pars))
        log.debug("Compared to: %s", expected_res_param)

        # Compare to what we are supposed to have
        self.assertEqual(expected_agent_cmd, agt_cmds)
        self.assertEqual(expected_agent_param, agt_pars)
        self.assertEqual(expected_res_cmd, res_cmds)
        self.assertEqual(expected_res_int, res_iface)
        self.assertEqual(expected_res_param, res_pars)

    def _common_resource_parameters(self):
        '''
        list of common resource parameters
        @return: list of resource parameters
        '''
        return ['batched_particle_count', 'publisher_polling_interval', 'records_per_second']

    def _common_agent_parameters(self):
        '''
        list of common agent parameters
        @return: list of agent parameters
        '''
        return ['aggstatus', 'alerts', 'driver_name', 'driver_pid', 'example', 'pubrate', 'streams']

    def _common_agent_commands(self, agent_state):
        '''
        list of common agent parameters for a agent state
        @return: list of agent parameters
        @raise: KeyError for undefined agent state
        '''
        capabilities = {
            ResourceAgentState.UNINITIALIZED: [
                ResourceAgentEvent.GO_ACTIVE,
                ResourceAgentEvent.RESET,
            ],
            ResourceAgentState.IDLE: [
                ResourceAgentEvent.GO_INACTIVE,
                ResourceAgentEvent.RESET,
                ResourceAgentEvent.RUN,
            ],
            ResourceAgentState.COMMAND: [
                ResourceAgentEvent.CLEAR,
                ResourceAgentEvent.RESET,
                ResourceAgentEvent.GO_INACTIVE,
                ResourceAgentEvent.PAUSE
            ],
            ResourceAgentState.STREAMING: [
                ResourceAgentEvent.RESET,
                ResourceAgentEvent.GO_INACTIVE
            ],

            ResourceAgentState.LOST_CONNECTION: [
                ResourceAgentEvent.RESET,
                ResourceAgentEvent.GO_INACTIVE
            ]
        }

        return capabilities[agent_state]

    def assert_state_change(self, target_agent_state, timeout=10):
        """
        Verify the agent and resource states change as expected within the timeout
        Fail if the state doesn't change to the expected state.
        @param target_agent_state: State we expect the agent to be in
        @param timeout: how long to wait for the driver to change states
        """
        to = gevent.Timeout(timeout)
        to.start()
        done = False
        agent_state = None

        try:
            while(not done):

                agent_state = self._dsa_client.get_agent_state()
                log.error("Current agent state: %s", agent_state)

                if(agent_state == target_agent_state):
                    log.debug("Current state match: %s", agent_state)
                    done = True

                if not done:
                    log.debug("state mismatch, waiting for state to transition.")
                    gevent.sleep(1)
        except Timeout:
            log.error("Failed to transition agent state to %s, current state: %s", target_agent_state, agent_state)
            self.fail("Failed to transition state.")
        finally:
            to.cancel()
コード例 #10
0
class TestExternalDatasetAgentMgmt(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        log.debug("TestExternalDatasetAgentMgmt: started services")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)

    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datsetModel_obj = IonObject(RT.ExternalDatasetModel, name='ExampleDatasetModel', description="ExampleDatasetModel", datset_type="FibSeries" )
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(datsetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id) )

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(RT.ExternalDatasetAgent, name='datasetagent007', description="datasetagent007", handler_module="ion.agents.eoi.external_dataset_agent", handler_class="ExternalDatasetAgent" )
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id) )


        # Create ExternalDataset
        log.debug('TestExternalDatasetAgentMgmt: Create external dataset resource ')
        extDataset_obj = IonObject(RT.ExternalDataset, name='ExtDataset', description="ExtDataset" )
        try:
            extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" %ex)

        log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ", str(extDataset_id))

        #register the dataset as a data producer
        self.damsclient.register_external_data_set(extDataset_id)


        # Create agent config.
        self._stream_config = {}
        agent_config = {
            'driver_config' : DVR_CONFIG,
            'stream_config' : self._stream_config,
            'agent'         : {'resource_id': EDA_RESOURCE_ID},
            'test_mode' : True
        }

        extDatasetAgentInstance_obj = IonObject(RT.ExternalDatasetAgentInstance, name='DatasetAgentInstance', description="DatasetAgentInstance", dataset_driver_config = DVR_CONFIG, dataset_agent_config = agent_config)
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(external_dataset_agent_instance=extDatasetAgentInstance_obj, external_dataset_agent_id=datasetAgent_id, external_dataset_id=extDataset_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj) )
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id) )
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def)

        log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id))

        log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition")
        dp_obj = IonObject(RT.DataProduct,name='eoi dataset data',description=' stream test')
        try:
            data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1) )

        self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1)

        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1, persist_data=True, persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids) )


        self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id)


        dataset_agent_instance_obj= self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj) )

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id,  process=FakeProcess())
        print 'activate_instrument: got ia client %s', self._dsa_client
        log.debug("test_activateInstrument: got dataset client %s", str(self._dsa_client))

        cmd=AgentCommand(command='initialize')
        _ = self._dsa_client.execute_agent(cmd)

        cmd = AgentCommand(command='go_active')
        _ = self._dsa_client.execute_agent(cmd)

        cmd = AgentCommand(command='run')
        _ = self._dsa_client.execute_agent(cmd)

        log.info('Send an unconstrained request for data (\'new data\')')
        config={'stream_id':'first_new','TESTING':True}
        cmd = AgentCommand(command='acquire_data', args=[config])
        self._dsa_client.execute(cmd)

        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        config={'stream_id':'second_new','TESTING':True}
        cmd = AgentCommand(command='acquire_data', args=[config])
        self._dsa_client.execute(cmd)

        cmd = AgentCommand(command='reset')
        _ = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)
コード例 #11
0
class TestExternalDatasetAgentMgmt(IonIntegrationTestCase):

    # DataHandler config
    DVR_CONFIG = {
        'dvr_mod' : 'ion.agents.data.handlers.base_data_handler',
        'dvr_cls' : 'DummyDataHandler',
    }

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        log.debug("TestExternalDatasetAgentMgmt: started services")

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)

    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datsetModel_obj = IonObject(RT.ExternalDatasetModel, name='ExampleDatasetModel', description="ExampleDatasetModel", datset_type="FibSeries" )
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(datsetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id) )

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(RT.ExternalDatasetAgent, name='datasetagent007', description="datasetagent007", handler_module=EDA_MOD, handler_class=EDA_CLS )
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id) )


        # Create ExternalDataset
        log.debug('TestExternalDatasetAgentMgmt: Create external dataset resource ')
        extDataset_obj = IonObject(RT.ExternalDataset, name='ExtDataset', description="ExtDataset" )
        try:
            extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" %ex)

        log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ", str(extDataset_id))

        #register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(extDataset_id)

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def)

        log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id))

        log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition")
        dp_obj = IonObject(RT.DataProduct,name='eoi dataset data',description=' stream test')
        try:
            data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" %ex)
        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1) )

        self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1)

        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1, persist_data=True, persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids) )
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set('data', 'external_data')

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,#TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
            'data_producer_id':dproducer_id,
#            'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id
            'taxonomy':tx.dump(), #TODO: Currently does not support sets
            'max_records':4,
            }

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            'driver_config' : self.DVR_CONFIG,
            'stream_config' : self._stream_config,
            'agent'         : {'resource_id': EDA_RESOURCE_ID},
            'test_mode' : True
        }

        extDatasetAgentInstance_obj = IonObject(RT.ExternalDatasetAgentInstance, name='DatasetAgentInstance', description="DatasetAgentInstance", dataset_driver_config = self.DVR_CONFIG, dataset_agent_config = agent_config)
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(external_dataset_agent_instance=extDatasetAgentInstance_obj, external_dataset_agent_id=datasetAgent_id, external_dataset_id=extDataset_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj) )
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id) )

        #Check that the instance is currently not active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ", str(id), str(active) )

        self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id)


        dataset_agent_instance_obj= self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj) )

        # now the instance process should be active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ", str(id), str(active) )

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id,  process=FakeProcess())
        print 'TestExternalDatasetAgentMgmt: got ia client %s', self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset client %s", str(self._dsa_client))

#        cmd=AgentCommand(command='initialize')
#        _ = self._dsa_client.execute_agent(cmd)
#
#        cmd = AgentCommand(command='go_active')
#        _ = self._dsa_client.execute_agent(cmd)
#
#        cmd = AgentCommand(command='run')
#        _ = self._dsa_client.execute_agent(cmd)
#
#        log.info('Send an unconstrained request for data (\'new data\')')
#        cmd = AgentCommand(command='acquire_data')
#        self._dsa_client.execute(cmd)
#
#        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
#        cmd = AgentCommand(command='acquire_data')
#        self._dsa_client.execute(cmd)
#
#        cmd = AgentCommand(command='reset')
#        _ = self._dsa_client.execute_agent(cmd)
#        cmd = AgentCommand(command='get_current_state')
#        retval = self._dsa_client.execute_agent(cmd)
#        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='resume')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='reset')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)




        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)
コード例 #12
0
class DatasetAgentTestCase(IonIntegrationTestCase):
    """
    Base class for all dataset agent end to end tests
    """
    test_config = DatasetAgentTestConfig()

    def setUp(self, deploy_file=DEPLOY_FILE):
        """
        Start container.
        Start deploy services.
        Define agent config, start agent.
        Start agent client.
        """
        self._dsa_client = None
        self.dams = DataAcquisitionManagementServiceClient()

        # Ensure we have a good test configuration
        self.test_config.verify()

        # Start container.
        log.info('Staring capability container.')
        self._start_container()
        self.rr = self.container.resource_registry

        # Bring up services in a deploy file (no need to message)
        log.info('Starting deploy services. %s', deploy_file)
        self.container.start_rel_from_url(DEPLOY_FILE)

        # Load instrument specific parameters
        log.info('Preload test scenarios')
        self._load_params()

        # Start a resource agent client to talk with the instrument agent.
        log.info('Starting DSA process')
        self._dsa_client = self._start_dataset_agent_process()
        log.debug("Client created: %s", type(self._dsa_client))
        self.addCleanup(self._stop_dataset_agent_process)
        log.info('test setup complete')

        # Start data subscribers
        self._start_data_subscribers()
        self.addCleanup(self._stop_data_subscribers)

    ###
    #   Test/Agent Startup Helpers
    ###
    def _load_params(self):
        """
        Do a second round of preload with instrument specific scenarios
        """
        scenario = None
        categories = None

        if PRELOAD_CATEGORIES:
            categories = ",".join(PRELOAD_CATEGORIES)

        # load_parameter_scenarios
        if PRELOAD_SCENARIO:
            scenario = PRELOAD_SCENARIO
        else:
            log.warn("No common preload defined.  Was this intentional?")

        if self.test_config.preload_scenario:
            scenario = "%s,%s" % (
                scenario, self.test_config.preload_scenario
            ) if scenario else self.test_config.preload_scenario
        else:
            log.warn("No DSA specific preload defined.  Was this intentional?")

        if scenario:
            preload_config = dict(
                op="load",
                scenario=scenario,
                #path="master",
                path=TESTED_DOC,
                categories=categories,
                clearcols="owner_id,org_ids",
                #assets="res/preload/r2_ioc/ooi_assets",
                #parseooi="True",
            )
            log.debug("Starting preload now: config=%s", preload_config)
            self.container.spawn_process("Loader",
                                         "ion.processes.bootstrap.ion_loader",
                                         "IONLoader", preload_config)

    def _start_dataset_agent_process(self):
        """
        Launch the agent process and store the configuration.  Tried
        to emulate the same process used by import_data.py
        """
        instrument_device, dsa_instance = self._get_dsa_instance()
        self._driver_config = dsa_instance.driver_config

        self._update_dsa_config(dsa_instance)
        self._update_harvester_config(dsa_instance)

        self._dsa_instance = dsa_instance
        self.clear_sample_data()

        # Return a resource agent client
        return self._get_dsa_client(instrument_device, dsa_instance)

    def _stop_dataset_agent_process(self):
        """
        Stop the dataset agent instance
        """
        self.assert_reset()
        self.dams.stop_external_dataset_agent_instance(self._dsa_instance._id)

    def _get_dsa_instance(self):
        """
        Find the dsa instance in preload and return an instance of that object
        """
        name = self.test_config.instrument_device_name

        log.debug("Start dataset agent process for instrument device: %s",
                  name)
        objects, _ = self.rr.find_resources(RT.InstrumentDevice, name=name)
        log.debug("Found Instrument Devices: %s", objects)
        if not objects:
            raise ConfigNotFound(
                "No appropriate InstrumentDevice objects loaded")

        instrument_device = objects[0]
        log.trace("Found instrument device: %s", instrument_device)

        dsa_instance = self.rr.read_object(
            subject=instrument_device._id,
            predicate=PRED.hasAgentInstance,
            object_type=RT.ExternalDatasetAgentInstance)

        log.info("dsa_instance found: %s", dsa_instance)

        return instrument_device, dsa_instance

    def _update_harvester_config(self, dsa_instance):
        """
        Update the harvester config such that we change the directory to something
        we have write permissions.
        """
        log.info("dsa agent instance: %s", dsa_instance)
        driver_config = dsa_instance.driver_config

        log.info("dsa agent driver config: %s", driver_config)
        driver_config['startup_config']['harvester'][
            'directory'] = self.test_config.data_dir

        log.info("updated driver config: %s", driver_config)
        dsa_instance.driver_config = driver_config

        self.rr.update(dsa_instance)

    def _update_dsa_config(self, dsa_instance):
        """
        Update the dsa configuration prior to loading the agent.  This is where we can
        alter production configurations for use in a controlled test environment.
        """
        dsa_obj = self.rr.read_object(object_type=RT.ExternalDatasetAgent,
                                      predicate=PRED.hasAgentDefinition,
                                      subject=dsa_instance._id,
                                      id_only=False)

        log.info("dsa agent definition found: %s", dsa_obj)

        # If we don't want to load from an egg then we need to
        # alter the driver config read from preload
        if self.test_config.mi_repo is not None:
            dsa_obj.driver_uri = None
            # Strip the custom namespace
            dsa_obj.driver_module = ".".join(
                dsa_obj.driver_module.split('.')[1:])

            log.info("saving new dsa agent config: %s", dsa_obj)
            self.rr.update(dsa_obj)

            if not self.test_config.mi_repo in sys.path:
                sys.path.insert(0, self.test_config.mi_repo)

            log.debug("Driver module: %s", dsa_obj.driver_module)
            log.debug("MI Repo: %s", self.test_config.mi_repo)
            log.trace("Sys Path: %s", sys.path)

    def _get_dsa_client(self, instrument_device, dsa_instance):
        """
        Launch the agent and return a client
        """
        fake_process = FakeProcess()
        fake_process.container = self.container

        clients = DataAcquisitionManagementServiceDependentClients(
            fake_process)
        config_builder = ExternalDatasetAgentConfigurationBuilder(clients)

        try:
            config_builder.set_agent_instance_object(dsa_instance)
            self.agent_config = config_builder.prepare()
        except Exception as e:
            log.error('failed to launch: %s', e, exc_info=True)
            raise ServerError('failed to launch')

        self._dsa_pid = self.dams.start_external_dataset_agent_instance(
            dsa_instance._id)
        log.debug("_get_dsa_client CFG")
        return ResourceAgentClient(instrument_device._id,
                                   process=FakeProcess())

        dispatcher = ProcessDispatcherServiceClient()
        launcher = AgentLauncher(dispatcher)

        log.debug("Launching agent process!")

        self._dsa_pid = launcher.launch(
            self.agent_config,
            config_builder._get_process_definition()._id)
        if not self._dsa_pid:
            raise ServerError(
                "Launched external dataset agent instance but no process_id")
        config_builder.record_launch_parameters(self.agent_config)

        launcher.await_launch(10.0)
        return ResourceAgentClient(instrument_device._id,
                                   process=FakeProcess())

    def _get_dsa_object_state(self):
        state, _id = self.container.state_repository.get_state(self._dsa_pid)
        log.debug("agent_state (%s): %s", self._dsa_pid, state)

        driver_state = state.get(DSA_STATE_KEY)
        log.debug("driver_state (%s): %s", self._dsa_pid, driver_state)

        return driver_state

    ###
    #   Data file helpers
    ###

    def _get_source_data_file(self, filename):
        """
        Search for a sample data file, first check the driver resource directory
        then just use the filename as a path.  If the file doesn't exists
        raise an exception
        @param filename name or path of the file to search for
        @return full path to the found data file
        @raise IonException if the file isn't found
        """
        resource_dir = self.test_config.test_resource_dir
        source_path = os.path.join(resource_dir, filename)

        log.debug("Search for resource file (%s) in %s", filename,
                  resource_dir)
        if os.path.isfile(source_path):
            log.debug("Found %s in resource directory", filename)
            return source_path

        log.debug("Search for resource file (%s) in current directory",
                  filename)
        if os.path.isfile(filename):
            log.debug("Found %s in the current directory", filename)
            return filename

        raise IonException("Data file %s does not exist", filename)

    def create_data_dir(self):
        """
        Verify the test data directory is created and exists.  Return the path to
        the directory.
        @return: path to data directory
        @raise: ConfigNotFound no harvester config
        @raise: IonException if data_dir exists, but not a directory
        """
        startup_config = self._driver_config.get('startup_config')
        if not startup_config:
            raise ConfigNotFound("Driver config missing 'startup_config'")

        harvester_config = startup_config.get('harvester')
        if not harvester_config:
            raise ConfigNotFound("Startup config missing 'harvester' config")

        data_dir = harvester_config.get("directory")
        if not data_dir:
            raise ConfigNotFound("Harvester config missing 'directory'")

        if not os.path.exists(data_dir):
            log.debug("Creating data dir: %s", data_dir)
            os.makedirs(data_dir)

        elif not os.path.isdir(data_dir):
            raise IonException("'data_dir' is not a directory")

        return data_dir

    def clear_sample_data(self):
        """
        Remove all files from the sample data directory
        """
        data_dir = self.create_data_dir()

        log.debug("Clean all data from %s", data_dir)
        self.remove_all_files(data_dir)

    def create_sample_data(self, filename, dest_filename=None):
        """
        Search for a data file in the driver resource directory and if the file
        is not found there then search using the filename directly.  Then copy
        the file to the test data directory.

        If a dest_filename is supplied it will be renamed in the destination
        directory.
        @param: filename - filename or path to a data file to copy
        @param: dest_filename - name of the file when copied. default to filename
        """
        data_dir = self.create_data_dir()
        source_path = self._get_source_data_file(filename)

        log.debug("DIR: %s", data_dir)
        if dest_filename is None:
            dest_path = os.path.join(data_dir, os.path.basename(source_path))
        else:
            dest_path = os.path.join(data_dir, dest_filename)

        log.debug("Creating data file src: %s, dest: %s", source_path,
                  dest_path)
        shutil.copy2(source_path, dest_path)

        return dest_path

    def remove_all_files(self, dir_name):
        """
        Remove all files from a directory.  Raise an exception if the directory contains something
        other than files.
        @param dir_name directory path to remove files.
        @raise RuntimeError if the directory contains anything except files.
        """
        for file_name in os.listdir(dir_name):
            file_path = os.path.join(dir_name, file_name)
            if not os.path.isfile(file_path):
                raise RuntimeError("%s is not a file", file_path)

        for file_name in os.listdir(dir_name):
            file_path = os.path.join(dir_name, file_name)
            os.unlink(file_path)

    ###############################################################################
    # Event helpers.
    ###############################################################################

    def _start_event_subscriber(self, type='ResourceAgentEvent', count=0):
        """
        Start a subscriber to the instrument agent events.
        @param type The type of event to catch.
        @count Trigger the async event result when events received reaches this.
        """
        def consume_event(*args, **kwargs):
            log.info('Test recieved ION event: args=%s, kwargs=%s, event=%s.',
                     str(args), str(kwargs), str(args[0]))
            self._events_received.append(args[0])
            if self._event_count > 0 and \
                self._event_count == len(self._events_received):
                self._async_event_result.set()

        # Event array and async event result.
        self._event_count = count
        self._events_received = []
        self._async_event_result = AsyncResult()

        self._event_subscriber = EventSubscriber(
            event_type=type, callback=consume_event,
            origin="IA_RESOURCE_ID")  #TODO
        self._event_subscriber.start()
        self._event_subscriber._ready_event.wait(timeout=5)

    def _stop_event_subscriber(self):
        """
        Stop event subscribers on cleanup.
        """
        self._event_subscriber.stop()
        self._event_subscriber = None

    ###############################################################################
    # Data stream helpers.
    ###############################################################################
    def _start_data_subscribers(self):
        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            if self._samples_received.get(stream_id) is None:
                self._samples_received[stream_id] = []

            log.info('Received parsed data on %s (%s,%s)', stream_id,
                     stream_route.exchange_point, stream_route.routing_key)
            self._samples_received[stream_id].append(message)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = {}
        self._stream_id_map = {}
        stream_config = self.agent_config['stream_config']

        log.info("starting data subscribers")

        for stream_name in stream_config.keys():
            log.debug("Starting data subscriber for stream '%s'", stream_name)
            stream_id = stream_config[stream_name]['stream_id']
            self._stream_id_map[stream_name] = stream_id
            self._start_data_subscriber(stream_config[stream_name], recv_data)

    def _start_data_subscriber(self, config, callback):
        """
        Setup and start a data subscriber
        """
        exchange_point = config['exchange_point']
        stream_id = config['stream_id']

        sub = StandaloneStreamSubscriber(exchange_point, callback)
        sub.start()
        self._data_subscribers.append(sub)

        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        sub_id = pubsub_client.create_subscription(name=exchange_point,
                                                   stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)

    def make_data_product(self, pdict_name, dp_name, available_fields=None):
        self.pubsub_management = PubsubManagementServiceClient()
        if available_fields is None: available_fields = []
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            pdict_name, id_only=True)
        stream_def_id = self.pubsub_management.create_stream_definition(
            '%s stream_def' % dp_name,
            parameter_dictionary_id=pdict_id,
            available_fields=available_fields or None)
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        stream_def_id)
        dp_obj = DataProduct(name=dp_name)
        data_product_id = self.data_product_management.create_data_product(
            dp_obj, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product,
                        data_product_id)
        return data_product_id

    def _stop_data_subscribers(self):
        for subscriber in self._data_subscribers:
            pubsub_client = PubsubManagementServiceClient()
            if hasattr(subscriber, 'subscription_id'):
                try:
                    pubsub_client.deactivate_subscription(
                        subscriber.subscription_id)
                except:
                    pass
                pubsub_client.delete_subscription(subscriber.subscription_id)
            subscriber.stop()

    def get_samples(self, stream_name, sample_count=1, timeout=30):
        """
        listen on a stream until 'sample_count' samples are read and return
        a list of all samples read.  If the required number of samples aren't
        read then throw an exception.

        Note that this method does not clear the sample queue for the stream.
        This should be done explicitly by the caller.  However, samples that
        are consumed by this method are removed.

        @raise SampleTimeout - if the required number of samples aren't read
        """
        to = gevent.Timeout(timeout)
        to.start()
        done = False
        result = []
        i = 0

        log.debug("Fetch %s sample(s) from stream '%s'" %
                  (sample_count, stream_name))

        stream_id = self._stream_id_map.get(stream_name)
        log.debug("Stream ID Map: %s ", self._stream_id_map)
        self.assertIsNotNone(stream_id,
                             msg="Unable to find stream name '%s'" %
                             stream_name)

        try:
            while (not done):
                if (self._samples_received.has_key(stream_id)
                        and len(self._samples_received.get(stream_id))):
                    log.trace("get_samples() received sample #%d!", i)
                    result.append(self._samples_received[stream_id].pop(0))
                    i += 1

                    if i >= sample_count:
                        done = True

                else:
                    log.debug(
                        "No samples in %s. Sleep a bit to wait for the data queue to fill up.",
                        stream_name)
                    gevent.sleep(1)

        except Timeout:
            log.error("Failed to get %d records from %s.  received: %d",
                      sample_count, stream_name, i)
            self.fail("Failed to read samples from stream %s", stream_name)
        finally:
            to.cancel()
            return result

    def remove_sample_dir(self):
        """
        Remove the sample dir and all files
        """
        data_dir = self.create_data_dir()
        self.clear_sample_data()
        os.rmdir(data_dir)

    ###
    #   Common assert methods
    ###

    def assertDictEqual(self, d1, d2, msg=None):  # assertEqual uses for dicts
        for k, v1 in d1.iteritems():
            self.assertIn(k, d2, msg)
            v2 = d2[k]
            if (isinstance(v1, collections.Iterable)
                    and not isinstance(v1, basestring)):
                self.assertItemsEqual(v1, v2, msg)
            else:
                self.assertEqual(v1, v2, msg)
        return True

    def assert_initialize(self, final_state=ResourceAgentState.STREAMING):
        '''
        Walk through DSA states to get to streaming mode from uninitialized
        '''
        state = self._dsa_client.get_agent_state()

        with self.assertRaises(Conflict):
            res_state = self._dsa_client.get_resource_state()

        log.debug("Initialize DataSet agent")
        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.INACTIVE)
        log.info("Sent INITIALIZE; DSA state = %s", state)

        log.debug("DataSet agent go active")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent GO_ACTIVE; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.IDLE)

        log.debug("DataSet agent run")
        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        retval = self._dsa_client.execute_agent(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent RUN; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.COMMAND)

        if final_state == ResourceAgentState.STREAMING:
            self.assert_start_sampling()

    def assert_stop_sampling(self):
        '''
        transition to command.  Must be called from streaming
        '''
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.STREAMING)

        log.debug("DataSet agent start sampling")
        cmd = AgentCommand(command='DRIVER_EVENT_STOP_AUTOSAMPLE')
        retval = self._dsa_client.execute_resource(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent START SAMPLING; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.COMMAND)

    def assert_start_sampling(self):
        '''
        transition to sampling.  Must be called from command
        '''
        state = self._dsa_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        log.debug("DataSet agent start sampling")
        cmd = AgentCommand(command='DRIVER_EVENT_START_AUTOSAMPLE')
        retval = self._dsa_client.execute_resource(cmd)
        state = self._dsa_client.get_agent_state()
        log.info("Sent START SAMPLING; DSA state = %s", state)
        self.assertEqual(state, ResourceAgentState.STREAMING)

    def assert_reset(self):
        '''
        Put the instrument back in uninitialized
        '''
        if self._dsa_client is None:
            return

        state = self._dsa_client.get_agent_state()

        if state != ResourceAgentState.UNINITIALIZED:
            cmd = AgentCommand(command=ResourceAgentEvent.RESET)
            retval = self._dsa_client.execute_agent(cmd)
            state = self._dsa_client.get_agent_state()

        self.assertEqual(state, ResourceAgentState.UNINITIALIZED)

    def assert_data_values(self, granules, dataset_definition_file):
        """
        Verify granules match the granules defined in the definition file
        """
        rs_file = self._get_source_data_file(dataset_definition_file)
        rs = ResultSet(rs_file)

        self.assertTrue(rs.verify(granules),
                        msg="Failed data validation.  See log for details")

    def assert_sample_queue_size(self, stream_name, size):
        """
        verify a sample queue is the size we expect it to be.
        """
        # Sleep a couple seconds to ensure the
        gevent.sleep(2)

        stream_id = self._stream_id_map.get(stream_name)
        length = 0
        if stream_id in self._samples_received:
            length = len(self._samples_received[stream_id])
        self.assertEqual(length,
                         size,
                         msg="Queue size != expected size (%d != %d)" %
                         (length, size))

    def assert_set_pubrate(self, rate):
        """
        Set the pubrate for the parsed data stream.  Set to 0 for
        no buffering
        """
        self.assertIsInstance(rate, (int, float))
        self.assertGreaterEqual(rate, 0)

        expected_pubrate = {self.test_config.stream_name: rate}

        retval = self._dsa_client.set_agent({'pubrate': expected_pubrate})

        retval = self._dsa_client.get_agent(['pubrate'])
        expected_pubrate_result = {'pubrate': expected_pubrate}
        self.assertEqual(retval, expected_pubrate_result)

    def assert_agent_command(self, command, args=None, timeout=None):
        """
        Verify an agent command
        @param command: driver command to execute
        @param args: kwargs to pass to the agent command object
        """
        cmd = AgentCommand(command=command, kwargs=args)
        retval = self._dsa_client.execute_agent(cmd, timeout=timeout)

    def assert_resource_command(self, command, args=None, timeout=None):
        """
        Verify a resource command
        @param command: driver command to execute
        @param args: kwargs to pass to the agent command object
        """
        cmd = AgentCommand(command=command, kwargs=args)
        retval = self._dsa_client.execute_resource(cmd)

    def assert_schema(self, caps_list):

        dd_list = ['display_name', 'description']
        ddt_list = ['display_name', 'description', 'type']
        ddvt_list = ['display_name', 'description', 'visibility', 'type']
        ddak_list = ['display_name', 'description', 'args', 'kwargs']

        for x in caps_list:
            if isinstance(x, dict):
                x.pop('type_')
                x = IonObject('AgentCapability', **x)

            if x.cap_type == CapabilityType.AGT_CMD:
                keys = x.schema.keys()
                for y in ddak_list:
                    self.assertIn(y, keys)

            elif x.cap_type == CapabilityType.AGT_PAR:
                if x.name != 'example':
                    keys = x.schema.keys()
                    for y in ddvt_list:
                        self.assertIn(y, keys)

            elif x.cap_type == CapabilityType.RES_CMD:
                keys = x.schema.keys()
                self.assertIn('return', keys)
                self.assertIn('display_name', keys)
                self.assertIn('arguments', keys)
                self.assertIn('timeout', keys)

            elif x.cap_type == CapabilityType.RES_IFACE:
                pass

            elif x.cap_type == CapabilityType.RES_PAR:
                keys = x.schema.keys()
                self.assertIn('get_timeout', keys)
                self.assertIn('set_timeout', keys)
                self.assertIn('direct_access', keys)
                self.assertIn('startup', keys)
                self.assertIn('visibility', keys)

            elif x.cap_type == CapabilityType.AGT_STATES:
                for (k, v) in x.schema.iteritems():
                    keys = v.keys()
                    for y in dd_list:
                        self.assertIn(y, keys)

            elif x.cap_type == CapabilityType.ALERT_DEFS:
                for (k, v) in x.schema.iteritems():
                    keys = v.keys()
                    for y in ddt_list:
                        self.assertIn(y, keys)

            elif x.cap_type == CapabilityType.AGT_CMD_ARGS:
                for (k, v) in x.schema.iteritems():
                    keys = v.keys()
                    for y in ddt_list:
                        self.assertIn(y, keys)

    def assert_agent_capabilities(self):
        """
        Verify capabilities throughout the agent lifecycle
        """
        capabilities = {
            AgentCapabilityType.AGENT_COMMAND:
            self._common_agent_commands(ResourceAgentState.UNINITIALIZED),
            AgentCapabilityType.AGENT_PARAMETER:
            self._common_agent_parameters(),
            AgentCapabilityType.RESOURCE_COMMAND:
            None,
            AgentCapabilityType.RESOURCE_INTERFACE:
            None,
            AgentCapabilityType.RESOURCE_PARAMETER:
            None,
        }

        ###
        # DSA State INACTIVE
        ###

        log.debug("Initialize DataSet agent")
        self.assert_agent_command(ResourceAgentEvent.INITIALIZE)
        self.assert_state_change(ResourceAgentState.INACTIVE)
        self.assert_capabilities(capabilities)

        ###
        # DSA State IDLE
        ###

        log.debug("DataSet agent go active")
        capabilities[
            AgentCapabilityType.AGENT_COMMAND] = self._common_agent_commands(
                ResourceAgentState.IDLE)
        self.assert_agent_command(ResourceAgentEvent.GO_ACTIVE)
        self.assert_state_change(ResourceAgentState.IDLE)
        self.assert_capabilities(capabilities)

        ###
        # DSA State COMMAND
        ###

        log.debug("DataSet agent run")
        capabilities[
            AgentCapabilityType.AGENT_COMMAND] = self._common_agent_commands(
                ResourceAgentState.COMMAND)
        capabilities[AgentCapabilityType.RESOURCE_COMMAND] = [
            'DRIVER_EVENT_START_AUTOSAMPLE'
        ]
        capabilities[AgentCapabilityType.
                     RESOURCE_PARAMETER] = self._common_resource_parameters()
        self.assert_agent_command(ResourceAgentEvent.RUN)
        self.assert_state_change(ResourceAgentState.COMMAND)
        self.assert_capabilities(capabilities)

        ###
        # DSA State STREAMING
        ###
        capabilities[
            AgentCapabilityType.AGENT_COMMAND] = self._common_agent_commands(
                ResourceAgentState.STREAMING)
        capabilities[AgentCapabilityType.RESOURCE_COMMAND] = [
            'DRIVER_EVENT_STOP_AUTOSAMPLE'
        ]
        capabilities[AgentCapabilityType.
                     RESOURCE_PARAMETER] = self._common_resource_parameters()
        self.assert_start_sampling()
        self.assert_capabilities(capabilities)

        ###
        # DSA State LOST_CONNECTION
        ###
        capabilities[
            AgentCapabilityType.AGENT_COMMAND] = self._common_agent_commands(
                ResourceAgentState.LOST_CONNECTION)
        capabilities[AgentCapabilityType.RESOURCE_COMMAND] = None
        capabilities[AgentCapabilityType.RESOURCE_PARAMETER] = None
        self.assert_agent_command(ResourceAgentEvent.RESET)
        self.assert_state_change(ResourceAgentState.UNINITIALIZED)

        self.remove_sample_dir()
        self.assert_initialize(final_state=ResourceAgentState.COMMAND)
        self.assert_resource_command('DRIVER_EVENT_START_AUTOSAMPLE')
        self.assert_state_change(ResourceAgentState.LOST_CONNECTION, 90)

    def assert_driver_state(self, expected_state=None):
        '''
        verify that expected persisted agent state matches was it actually stored
        @param expected_state dict expected
        '''
        state = self._get_dsa_object_state()

        if expected_state is None:
            self.assertIsNone(expected_state)
        else:
            self.assertEqual(expected_state, state)

    def assert_agent_state_after_restart(self):
        '''
        Restart the agent.  Verify that the agent PID changes. Then verify the new state
        matches the old state.
        '''
        old_pid = self._dsa_pid
        old_state = self._get_dsa_object_state()

        # Start a resource agent client to talk with the instrument agent.
        log.info('Restarting DSA process')
        self._stop_dataset_agent_process()
        self._dsa_client = self._start_dataset_agent_process()
        log.debug("Client created: %s", type(self._dsa_client))
        self.addCleanup(self.assert_reset)

        self.assert_initialize()

        self.assertNotEqual(old_pid, self._dsa_pid)
        self.assertEqual(old_state, self._get_dsa_object_state())

        # Kick it into autosample and give it time for samples to come in, there shouldn't be any
        gevent.sleep(5)

    def assert_capabilities(self, capabilities):
        '''
        Verify that all capabilities are available for a give state

        @todo: Currently resource interface not implemented because it requires
               a submodule update and some of the submodules are in release
               states.  So for now, no resource interfaces

        @param: dictionary of all the different capability types that are
        supposed to be there. i.e.
        {
          agent_command = ['DO_MY_COMMAND'],
          agent_parameter = ['foo'],
          resource_command = None,
          resource_interface = None,
          resource_parameter = None,
        }
        '''
        def sort_capabilities(caps_list):
            '''
            sort a return value into capability buckets.
            @retval agt_cmds, agt_pars, res_cmds, res_iface, res_pars
            '''
            agt_cmds = []
            agt_pars = []
            res_cmds = []
            res_iface = []
            res_pars = []

            if len(caps_list) > 0 and isinstance(caps_list[0],
                                                 AgentCapability):
                agt_cmds = [
                    x.name for x in caps_list
                    if x.cap_type == CapabilityType.AGT_CMD
                ]
                agt_pars = [
                    x.name for x in caps_list
                    if x.cap_type == CapabilityType.AGT_PAR
                ]
                res_cmds = [
                    x.name for x in caps_list
                    if x.cap_type == CapabilityType.RES_CMD
                ]
                #res_iface = [x.name for x in caps_list if x.cap_type==CapabilityType.RES_IFACE]
                res_pars = [
                    x.name for x in caps_list
                    if x.cap_type == CapabilityType.RES_PAR
                ]

            elif len(caps_list) > 0 and isinstance(caps_list[0], dict):
                agt_cmds = [
                    x['name'] for x in caps_list
                    if x['cap_type'] == CapabilityType.AGT_CMD
                ]
                agt_pars = [
                    x['name'] for x in caps_list
                    if x['cap_type'] == CapabilityType.AGT_PAR
                ]
                res_cmds = [
                    x['name'] for x in caps_list
                    if x['cap_type'] == CapabilityType.RES_CMD
                ]
                #res_iface = [x['name'] for x in caps_list if x['cap_type']==CapabilityType.RES_IFACE]
                res_pars = [
                    x['name'] for x in caps_list
                    if x['cap_type'] == CapabilityType.RES_PAR
                ]

            agt_cmds.sort()
            agt_pars.sort()
            res_cmds.sort()
            res_iface.sort()
            res_pars.sort()

            return agt_cmds, agt_pars, res_cmds, res_iface, res_pars

        if not capabilities.get(AgentCapabilityType.AGENT_COMMAND):
            capabilities[AgentCapabilityType.AGENT_COMMAND] = []
        if not capabilities.get(AgentCapabilityType.AGENT_PARAMETER):
            capabilities[AgentCapabilityType.AGENT_PARAMETER] = []
        if not capabilities.get(AgentCapabilityType.RESOURCE_COMMAND):
            capabilities[AgentCapabilityType.RESOURCE_COMMAND] = []
        if not capabilities.get(AgentCapabilityType.RESOURCE_INTERFACE):
            capabilities[AgentCapabilityType.RESOURCE_INTERFACE] = []
        if not capabilities.get(AgentCapabilityType.RESOURCE_PARAMETER):
            capabilities[AgentCapabilityType.RESOURCE_PARAMETER] = []

        expected_agent_cmd = capabilities.get(
            AgentCapabilityType.AGENT_COMMAND)
        expected_agent_cmd.sort()
        expected_agent_param = self._common_agent_parameters()
        expected_agent_param.sort()
        expected_res_cmd = capabilities.get(
            AgentCapabilityType.RESOURCE_COMMAND)
        expected_res_cmd.sort()
        expected_res_param = capabilities.get(
            AgentCapabilityType.RESOURCE_PARAMETER)
        expected_res_param.sort()
        expected_res_int = capabilities.get(
            AgentCapabilityType.RESOURCE_INTERFACE)
        expected_res_int.sort()

        # go get the active capabilities
        retval = self._dsa_client.get_capabilities()
        agt_cmds, agt_pars, res_cmds, res_iface, res_pars = sort_capabilities(
            retval)
        self.assert_schema(retval)

        log.debug("Agent Commands: %s ", str(agt_cmds))
        log.debug("Compared to: %s", expected_agent_cmd)
        log.debug("Agent Parameters: %s ", str(agt_pars))
        log.debug("Compared to: %s", expected_agent_param)
        log.debug("Resource Commands: %s ", str(res_cmds))
        log.debug("Compared to: %s", expected_res_cmd)
        log.debug("Resource Interface: %s ", str(res_iface))
        log.debug("Compared to: %s", expected_res_int)
        log.debug("Resource Parameter: %s ", str(res_pars))
        log.debug("Compared to: %s", expected_res_param)

        # Compare to what we are supposed to have
        self.assertEqual(expected_agent_cmd, agt_cmds)
        self.assertEqual(expected_agent_param, agt_pars)
        self.assertEqual(expected_res_cmd, res_cmds)
        self.assertEqual(expected_res_int, res_iface)
        self.assertEqual(expected_res_param, res_pars)

    def _common_resource_parameters(self):
        '''
        list of common resource parameters
        @return: list of resource parameters
        '''
        return [
            'batched_particle_count', 'publisher_polling_interval',
            'records_per_second'
        ]

    def _common_agent_parameters(self):
        '''
        list of common agent parameters
        @return: list of agent parameters
        '''
        return [
            'aggstatus', 'alerts', 'driver_name', 'driver_pid', 'example',
            'pubrate', 'streams'
        ]

    def _common_agent_commands(self, agent_state):
        '''
        list of common agent parameters for a agent state
        @return: list of agent parameters
        @raise: KeyError for undefined agent state
        '''
        capabilities = {
            ResourceAgentState.UNINITIALIZED: [
                ResourceAgentEvent.GO_ACTIVE,
                ResourceAgentEvent.RESET,
            ],
            ResourceAgentState.IDLE: [
                ResourceAgentEvent.GO_INACTIVE,
                ResourceAgentEvent.RESET,
                ResourceAgentEvent.RUN,
            ],
            ResourceAgentState.COMMAND: [
                ResourceAgentEvent.CLEAR, ResourceAgentEvent.RESET,
                ResourceAgentEvent.GO_INACTIVE, ResourceAgentEvent.PAUSE
            ],
            ResourceAgentState.STREAMING:
            [ResourceAgentEvent.RESET, ResourceAgentEvent.GO_INACTIVE],
            ResourceAgentState.LOST_CONNECTION:
            [ResourceAgentEvent.RESET, ResourceAgentEvent.GO_INACTIVE]
        }

        return capabilities[agent_state]

    def assert_state_change(self, target_agent_state, timeout=10):
        """
        Verify the agent and resource states change as expected within the timeout
        Fail if the state doesn't change to the expected state.
        @param target_agent_state: State we expect the agent to be in
        @param timeout: how long to wait for the driver to change states
        """
        to = gevent.Timeout(timeout)
        to.start()
        done = False
        agent_state = None

        try:
            while (not done):

                agent_state = self._dsa_client.get_agent_state()
                log.error("Current agent state: %s", agent_state)

                if (agent_state == target_agent_state):
                    log.debug("Current state match: %s", agent_state)
                    done = True

                if not done:
                    log.debug(
                        "state mismatch, waiting for state to transition.")
                    gevent.sleep(1)
        except Timeout:
            log.error(
                "Failed to transition agent state to %s, current state: %s",
                target_agent_state, agent_state)
            self.fail("Failed to transition state.")
        finally:
            to.cancel()