Example #1
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create clients to the services used in this test
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(
            node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(
            node=self.container.node)

        self.ctd_stream_def = SBE37_CDM_stream_definition()
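A setUp like this is usually paired with a tearDown that stops the container so each test starts from a clean broker and registry. A minimal sketch, assuming the _stop_container helper that pyon's integration test base class provides alongside _start_container:

    def tearDown(self):
        # Assumed counterpart to _start_container above: shut the
        # container down so state does not leak between tests.
        self._stop_container()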
Example #2
    def test_last_update_cache(self):
        handle = self.start_worker()
        queue = Queue()
        o_process = handle.process
        def new_process(msg):
            o_process(msg)
            queue.put(True)
        handle.process = new_process

        definition = SBE37_CDM_stream_definition()
        publisher = Publisher()

        stream_def_id = self.pubsub_cli.create_stream_definition(container=definition)
        stream_id = self.pubsub_cli.create_stream(stream_definition_id=stream_def_id)

        time = 0.0

        for granule in self.make_points(definition=definition, stream_id=stream_id, N=10):

            publisher.publish(granule, to_name=(self.XP, stream_id+'.data'))
            # Determinism sucks: block until the wrapped handler confirms delivery
            try:
                queue.get(timeout=5)
            except Empty:
                self.fail('Process never received the message.')

            doc = self.db.read(stream_id)
            ntime = doc.variables['time'].value
            self.assertTrue(ntime >= time, 'Documents were not updated in sequence.')
            time = ntime
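The monkey-patching above is a general way to make an asynchronous consumer observable from a test: wrap the handler so every invocation also signals a queue the test can block on. A standalone sketch of the same wrap-and-signal pattern in plain Python 2 (no container required; the names are illustrative):

from Queue import Queue, Empty

def make_observable(handler, signal_queue):
    """Wrap a one-argument handler so each call also signals a queue."""
    def wrapped(msg):
        handler(msg)
        signal_queue.put(True)
    return wrapped

# Usage: swap in the wrapper, trigger a message, then block until it ran.
q = Queue()
process = make_observable(lambda msg: None, q)
process('fake message')
try:
    q.get(timeout=5)
except Empty:
    raise AssertionError('handler never ran')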
Example #3
class DensityTransform(TransformFunction):
    ''' A basic transform that receives input through a subscription,
    parses the input from a CTD, extracts the conductivity, pressure and
    temperature values, and calculates density according to the defined
    algorithm. If the transform has an output_stream, it publishes the
    output on that stream.
    '''

    # Make the stream definitions class attributes of the transform... best available option I can think of?
    incoming_stream_def = SBE37_CDM_stream_definition()
    outgoing_stream_def = L2_density_stream_definition()

    def execute(self, granule):
        """Processes an incoming granule and returns a density granule.
        """

        # Use the deconstructor to pull data from a granule
        psd = PointSupplementStreamParser(
            stream_definition=self.incoming_stream_def, stream_granule=granule)

        conductivity = psd.get_values('conductivity')
        pressure = psd.get_values('pressure')
        temperature = psd.get_values('temperature')

        longitude = psd.get_values('longitude')
        latitude = psd.get_values('latitude')
        height = psd.get_values('height')
        time = psd.get_values('time')

        log.warn('Got conductivity: %s' % str(conductivity))
        log.warn('Got pressure: %s' % str(pressure))
        log.warn('Got temperature: %s' % str(temperature))

        sp = SP_from_cndr(r=conductivity / cte.C3515,
                          t=temperature,
                          p=pressure)

        sa = SA_from_SP(sp, pressure, longitude, latitude)

        density = rho(sa, temperature, pressure)

        log.warn('Got density: %s' % str(density))

        # Use the constructor to put data into a granule
        psc = PointSupplementConstructor(
            point_definition=self.outgoing_stream_def,
            stream_id=self.streams['output'])
        ### Assumes the config argument for output streams is known and there is only one 'output'.
        ### The stream id is part of the metadata which must go in each stream granule - this is
        ### awkward to do at the application level like this!

        for i in xrange(len(density)):
            point_id = psc.add_point(time=time[i],
                                     location=(longitude[i], latitude[i],
                                               height[i]))
            psc.add_scalar_point_coverage(point_id=point_id,
                                          coverage_id='density',
                                          value=density[i])

        return psc.close_stream_granule()
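The density computation above chains three functions from the seawater/gibbs package: SP_from_cndr converts the conductivity ratio (conductivity divided by cte.C3515, the conductivity of standard seawater) to Practical Salinity, SA_from_SP converts that to Absolute Salinity at the given position, and rho evaluates in-situ density. A standalone sketch with synthetic values, assuming the same seawater.gibbs API these transforms import:

import numpy as np
from seawater.gibbs import SP_from_cndr, SA_from_SP, rho, cte

conductivity = np.array([4.5])    # S/m, synthetic value
temperature = np.array([15.0])    # deg C
pressure = np.array([10.0])       # dbar
longitude = np.array([-70.0])
latitude = np.array([40.0])

sp = SP_from_cndr(r=conductivity / cte.C3515, t=temperature, p=pressure)
sa = SA_from_SP(sp, pressure, longitude, latitude)
density = rho(sa, temperature, pressure)   # kg/m^3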
Example #4
    def setUp(self):
        # Start container
        logging.disable(logging.ERROR)
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # simulate preloading
        preload_ion_params(self.container)
        logging.disable(logging.NOTSET)

        # Instantiate a process to represent the test
        process = VisualizationServiceTestProcess()

        # Now create clients to the services used in this test
        self.rrclient = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)
        self.damsclient = DataAcquisitionManagementServiceProcessClient(node=self.container.node, process=process)
        self.pubsubclient = PubsubManagementServiceProcessClient(node=self.container.node, process=process)
        self.ingestclient = IngestionManagementServiceProcessClient(node=self.container.node, process=process)
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)
        self.dataproductclient = DataProductManagementServiceProcessClient(node=self.container.node, process=process)
        self.dataprocessclient = DataProcessManagementServiceProcessClient(node=self.container.node, process=process)
        self.datasetclient = DatasetManagementServiceProcessClient(node=self.container.node, process=process)
        self.workflowclient = WorkflowManagementServiceProcessClient(node=self.container.node, process=process)
        self.process_dispatcher = ProcessDispatcherServiceProcessClient(node=self.container.node, process=process)
        self.data_retriever = DataRetrieverServiceProcessClient(node=self.container.node, process=process)
        self.vis_client = VisualizationServiceProcessClient(node=self.container.node, process=process)

        self.ctd_stream_def = SBE37_CDM_stream_definition()
Example #5
    def _create_ctd_input_stream_and_data_product(
            self, data_product_name='ctd_parsed'):

        cc = self.container
        assertions = self.assertTrue

        #-------------------------------
        # Create CTD Parsed as the initial data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(
            container=ctd_stream_def, name='Simulated CTD data')

        log.debug('Creating new CDM data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
                           name=data_product_name,
                           description='ctd stream test')
        try:
            ctd_parsed_data_product_id = self.dataproductclient.create_data_product(
                dp_obj, ctd_stream_def_id)
        except Exception as ex:
            self.fail("failed to create new data product: %s" % ex)

        log.debug('new ctd_parsed_data_product_id = %s',
                  ctd_parsed_data_product_id)

        # Only ever need one device for testing purposes.
        instDevice_objs, _ = self.rrclient.find_resources(
            restype=RT.InstrumentDevice, name='SBE37IMDevice')
        if instDevice_objs:
            instDevice_id = instDevice_objs[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name='SBE37IMDevice',
                                       description="SBE37IMDevice",
                                       serial_number="12345")
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_parsed_data_product_id)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_parsed_data_product_id,
            persist_data=False,
            persist_metadata=False)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_id,
                                                   PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        ctd_stream_id = stream_ids[0]

        return ctd_stream_id, ctd_parsed_data_product_id
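Every example that needs a product's stream uses the same associative lookup: find_objects(data_product_id, PRED.hasStream, None, True), then take the first id. A small helper for that recurring step (assuming the resource registry client and PRED constants used above):

def get_stream_id(rrclient, data_product_id):
    """Return the first stream id associated with a data product."""
    stream_ids, _ = rrclient.find_objects(data_product_id,
                                          PRED.hasStream, None, True)
    assert len(stream_ids) > 0, 'data product has no stream'
    return stream_ids[0]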
Example #6
    def test_find_stream_definition(self):
        definition = SBE37_CDM_stream_definition()
        definition_id = self.pubsub_cli.create_stream_definition(container=definition)
        stream_id = self.pubsub_cli.create_stream(stream_definition_id=definition_id)

        res_id = self.pubsub_cli.find_stream_definition(stream_id=stream_id, id_only=True)
        self.assertEqual(res_id, definition_id,
                         'The returned id did not match the definition_id.')

        res_obj = self.pubsub_cli.find_stream_definition(stream_id=stream_id, id_only=False)
        self.assertTrue(isinstance(res_obj.container, StreamDefinitionContainer),
            'The container object is not a stream definition.')
Example #7
class SalinityTransform(TransformFunction):
    '''
    L2 Transform for CTD Data.
    Input is conductivity, temperature, and pressure delivered as a single packet.
    Output is Practical Salinity as calculated by the Gibbs Seawater package.
    '''

    outgoing_stream_def = L2_practical_salinity_stream_definition()

    incoming_stream_def = SBE37_CDM_stream_definition()

    def execute(self, granule):
        """Processes an incoming granule and returns a salinity granule.
        """

        # Use the deconstructor to pull data from a granule
        psd = PointSupplementStreamParser(
            stream_definition=self.incoming_stream_def, stream_granule=granule)

        conductivity = psd.get_values('conductivity')
        pressure = psd.get_values('pressure')
        temperature = psd.get_values('temperature')

        longitude = psd.get_values('longitude')
        latitude = psd.get_values('latitude')
        height = psd.get_values('height')
        time = psd.get_values('time')

        log.warn('Got conductivity: %s' % str(conductivity))
        log.warn('Got pressure: %s' % str(pressure))
        log.warn('Got temperature: %s' % str(temperature))

        salinity = SP_from_cndr(r=conductivity / cte.C3515,
                                t=temperature,
                                p=pressure)

        log.warn('Got salinity: %s' % str(salinity))

        # Use the constructor to put data into a granule
        psc = PointSupplementConstructor(
            point_definition=self.outgoing_stream_def,
            stream_id=self.streams['output'])

        for i in xrange(len(salinity)):
            point_id = psc.add_point(time=time[i],
                                     location=(longitude[i], latitude[i],
                                               height[i]))
            psc.add_scalar_point_coverage(point_id=point_id,
                                          coverage_id='salinity',
                                          value=salinity[i])

        return psc.close_stream_granule()
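SalinityTransform and DensityTransform follow the same contract: class-level incoming/outgoing stream definitions, and an execute() that parses the incoming granule, computes, and returns a freshly constructed granule. A minimal skeleton for a new transform of this kind (the class and its behavior are hypothetical; assumes the same imports used above):

class PassThroughTransform(TransformFunction):
    '''Hypothetical example: copy the temperature field through unchanged.'''

    incoming_stream_def = SBE37_CDM_stream_definition()
    outgoing_stream_def = SBE37_CDM_stream_definition()

    def execute(self, granule):
        psd = PointSupplementStreamParser(
            stream_definition=self.incoming_stream_def, stream_granule=granule)
        time = psd.get_values('time')
        temperature = psd.get_values('temperature')
        longitude = psd.get_values('longitude')
        latitude = psd.get_values('latitude')
        height = psd.get_values('height')

        psc = PointSupplementConstructor(
            point_definition=self.outgoing_stream_def,
            stream_id=self.streams['output'])
        for i in xrange(len(temperature)):
            point_id = psc.add_point(
                time=time[i], location=(longitude[i], latitude[i], height[i]))
            psc.add_scalar_point_coverage(point_id=point_id,
                                          coverage_id='temperature',
                                          value=temperature[i])
        return psc.close_stream_granule()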
Example #8
    def test_stream_def_not_found(self):

        # An unknown stream id raises NotFound
        with self.assertRaises(NotFound):
            self.pubsub_cli.find_stream_definition(stream_id='nonexistent')

        definition = SBE37_CDM_stream_definition()
        definition_id = self.pubsub_cli.create_stream_definition(container=definition)

        # Creating a definition alone does not make the unknown id resolvable
        with self.assertRaises(NotFound):
            self.pubsub_cli.find_stream_definition(stream_id='nonexistent')

        stream_id = self.pubsub_cli.create_stream()

        # A stream with no associated definition also raises NotFound
        with self.assertRaises(NotFound):
            self.pubsub_cli.find_stream_definition(stream_id=stream_id)
Example #9
    def make_some_data(self):
        import numpy as np

        stream_id = 'I am very special'
        definition = SBE37_CDM_stream_definition()
        definition.stream_resource_id = stream_id

        self.couch.create(definition)

        total = 200
        n = 10  # at most n records per granule
        i = 0

        while i < total:
            r = random.randint(1, n)

            psc = PointSupplementConstructor(point_definition=definition,
                                             stream_id=stream_id)
            for x in xrange(r):
                i += 1
                point_id = psc.add_point(time=i, location=(0, 0, 0))
                psc.add_scalar_point_coverage(
                    point_id=point_id,
                    coverage_id='temperature',
                    value=np.random.normal(loc=48.0, scale=4.0, size=1)[0])
                psc.add_scalar_point_coverage(point_id=point_id,
                                              coverage_id='pressure',
                                              value=np.float32(1.0))
                psc.add_scalar_point_coverage(point_id=point_id,
                                              coverage_id='conductivity',
                                              value=np.float32(2.0))
            granule = psc.close_stream_granule()
            hdf_string = granule.identifiables[definition.data_stream_id].values
            sha1 = hashlib.sha1(hdf_string).hexdigest().upper()
            filename = FileSystem.get_hierarchical_url(FS.CACHE, '%s.hdf5' % sha1)
            with open(filename, 'w') as f:
                f.write(hdf_string)
            granule.identifiables[definition.data_stream_id].values = ''
            self.couch.create(granule)
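The granule bodies here are cached content-addressed: the HDF payload is written to a file named by the uppercase SHA-1 of its bytes, and the granule stored in Couch keeps only an empty placeholder in place of the payload. The same idea in plain Python, with the FileSystem/FS helper replaced by a hypothetical cache directory:

import hashlib
import os

def cache_payload(payload, cache_dir='/tmp/cache'):
    """Write payload to <cache_dir>/<SHA1>.hdf5 and return the digest key."""
    sha1 = hashlib.sha1(payload).hexdigest().upper()
    path = os.path.join(cache_dir, '%s.hdf5' % sha1)
    with open(path, 'w') as f:
        f.write(payload)
    return sha1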
Example #10
    def test_createTransformsThenActivateInstrument(self):

        # Set up the preconditions
        # ingestion configuration parameters
        self.exchange_point_id = 'science_data'
        self.number_of_workers = 2
        self.hdf_storage = HdfStorage(relative_path='ingest')
        self.couch_storage = CouchStorage(datastore_name='test_datastore')
        self.XP = 'science_data'
        self.exchange_name = 'ingestion_queue'

        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel",
                                  model_label="SBE37IMModel")
        try:
            instModel_id = self.imsclient.create_instrument_model(
                instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" % ex)
        print 'test_createTransformsThenActivateInstrument: new InstrumentModel id = ', instModel_id

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_module="ion.agents.instrument.instrument_agent",
            driver_class="InstrumentAgent")
        try:
            instAgent_id = self.imsclient.create_instrument_agent(
                instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" % ex)
        print 'test_createTransformsThenActivateInstrument: new InstrumentAgent id = ', instAgent_id

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        try:
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)

        print 'test_createTransformsThenActivateInstrument: new InstrumentDevice id = ', instDevice_id

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------
        #        driver_config = {
        #            'svr_addr': "localhost",
        #            'cmd_port': 5556,
        #            'evt_port': 5557,
        #            'dvr_mod': "ion.agents.instrument.drivers.sbe37.sbe37_driver",
        #            'dvr_cls': "SBE37Driver",
        #            'comms_config': {
        #                    'addr': 'sbe37-simulator.oceanobservatories.org',
        #                    'port': 4001,
        #                }
        #            }
        driver_config = {
            'dvr_mod': 'ion.agents.instrument.drivers.sbe37.sbe37_driver',
            'dvr_cls': 'SBE37Driver',
            'workdir': '/tmp/',
        }

        instAgentInstance_obj = IonObject(
            RT.InstrumentAgentInstance,
            name='SBE37IMAgentInstance',
            description="SBE37IMAgentInstance",
            driver_config=driver_config,
            comms_device_address='sbe37-simulator.oceanobservatories.org',
            comms_device_port=4001,
            port_agent_work_dir='/tmp/',
            port_agent_delimeter=['<<', '>>'])
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(
            container=ctd_stream_def)

        print 'test_createTransformsThenActivateInstrument: new Stream Definition id = ', ctd_stream_def_id

        print 'Creating new CDM data product with a stream definition'
        dp_obj = IonObject(RT.DataProduct,
                           name='ctd_parsed',
                           description='ctd stream test')
        try:
            ctd_parsed_data_product = self.dataproductclient.create_data_product(
                dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" % ex)
        print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_parsed_data_product)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_parsed_data_product,
            persist_data=True,
            persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product,
                                                   PRED.hasStream, None, True)
        print 'test_createTransformsThenActivateInstrument: Data product streams1 = ', stream_ids

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        print 'test_createTransformsThenActivateInstrument: Creating new RAW data product with a stream definition'
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubclient.create_stream_definition(
            container=raw_stream_def)

        dp_obj = IonObject(RT.DataProduct,
                           name='ctd_raw',
                           description='raw stream test')
        try:
            ctd_raw_data_product = self.dataproductclient.create_data_product(
                dp_obj, raw_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" % ex)
        print 'new ctd_raw_data_product_id = ', ctd_raw_data_product

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_raw_data_product)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_raw_data_product,
            persist_data=True,
            persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product,
                                                   PRED.hasStream, None, True)
        print 'Data product streams2 = ', stream_ids

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition ctd_L0_all"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L0_all',
            description='transform ctd package into three separate L0 streams',
            module='ion.processes.data.transforms.ctd.ctd_L0_all',
            class_name='ctd_L0_all',
            process_source='some_source_reference')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new ctd_L0_all data process definition: %s" %
                ex)

        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1ConductivityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_conductivity',
            description='create the L1 conductivity data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
            class_name='CTDL1ConductivityTransform',
            process_source='CTDL1ConductivityTransform source code here...')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1ConductivityTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1PressureTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_pressure',
            description='create the L1 pressure data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
            class_name='CTDL1PressureTransform',
            process_source='CTDL1PressureTransform source code here...')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1PressureTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1TemperatureTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L1_temperature',
            description='create the L1 temperature data product',
            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
            class_name='CTDL1TemperatureTransform',
            process_source='CTDL1TemperatureTransform source code here...')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new CTDL1TemperatureTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition SalinityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L2_salinity',
            description='create the L2 salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform',
            process_source='SalinityTransform source code here...')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new SalinityTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug(
            "TestIntDataProcessMgmtServiceMultiOut: create data process definition DensityTransform"
        )
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_L2_density',
            description='create the L2 density data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_density',
            class_name='DensityTransform',
            process_source='DensityTransform source code here...')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except BadRequest as ex:
            self.fail(
                "failed to create new DensityTransform data process definition: %s"
                % ex)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_conductivity, name='L0_Conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id)

        outgoing_stream_l0_pressure = L0_pressure_stream_definition()
        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_pressure, name='L0_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id)

        outgoing_stream_l0_temperature = L0_temperature_stream_definition()
        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l0_temperature, name='L0_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id)

        self.output_products = {}
        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L0 conductivity"
        )
        ctd_l0_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Conductivity',
            description='transform output conductivity')
        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_conductivity_output_dp_obj,
            outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l0_conductivity_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L0 pressure"
        )
        ctd_l0_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure')
        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l0_pressure_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L0 temperature"
        )
        ctd_l0_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature')
        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_temperature_output_dp_obj,
            outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l0_temperature_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l1_conductivity = L1_conductivity_stream_definition()
        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_conductivity, name='L1_conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_conductivity_id,
            ctd_L1_conductivity_dprocdef_id)

        outgoing_stream_l1_pressure = L1_pressure_stream_definition()
        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_pressure, name='L1_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id)

        outgoing_stream_l1_temperature = L1_temperature_stream_definition()
        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l1_temperature, name='L1_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L1 conductivity"
        )
        ctd_l1_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity')
        ctd_l1_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_conductivity_output_dp_obj,
            outgoing_stream_l1_conductivity_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l1_conductivity_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L1 pressure"
        )
        ctd_l1_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Pressure',
            description='transform output L1 pressure')
        ctd_l1_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_pressure_output_dp_obj, outgoing_stream_l1_pressure_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l1_pressure_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L1 temperature"
        )
        ctd_l1_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Temperature',
            description='transform output L1 temperature')
        ctd_l1_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_temperature_output_dp_obj,
            outgoing_stream_l1_temperature_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l1_temperature_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        outgoing_stream_l2_salinity = L2_practical_salinity_stream_definition()
        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l2_salinity, name='L2_salinity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id)

        outgoing_stream_l2_density = L2_density_stream_definition()
        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(
            container=outgoing_stream_l2_density, name='L2_Density')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L2 Salinity"
        )
        ctd_l2_salinity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L2_Salinity',
            description='transform output L2 salinity')
        ctd_l2_salinity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l2_salinity_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        log.debug(
            "test_createTransformsThenActivateInstrument: create output data product L2 Density"
        )
        ctd_l2_density_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L2_Density',
            description='transform output L2 density')
        ctd_l2_density_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_l2_density_output_dp_id,
            persist_data=True,
            persist_metadata=True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L0 all data_process start"
        )
        try:
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L0_all_dprocdef_id, [ctd_parsed_data_product],
                self.output_products)
            self.dataprocessclient.activate_data_process(
                ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L0 all data_process return"
        )

        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L1 Conductivity data_process start"
        )
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L1_conductivity_dprocdef_id,
                [ctd_l0_conductivity_output_dp_id],
                {'output': ctd_l1_conductivity_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L1 Conductivity data_process return"
        )

        #-------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L1_Pressure data_process start"
        )
        try:
            l1_pressure_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L1_pressure_dprocdef_id, [ctd_l0_pressure_output_dp_id],
                {'output': ctd_l1_pressure_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l1_pressure_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L1_Pressure data_process return"
        )

        #-------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L1_Temperature data_process start"
        )
        try:
            l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L1_temperature_dprocdef_id,
                [ctd_l0_temperature_output_dp_id],
                {'output': ctd_l1_temperature_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l1_temperature_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L1_Temperature data_process return"
        )

        #-------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L2_salinity data_process start"
        )
        try:
            l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L2_salinity_dprocdef_id, [ctd_parsed_data_product],
                {'output': ctd_l2_salinity_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l2_salinity_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L2_salinity data_process return"
        )

        #-------------------------------
        # L2 Density: Create the data process
        #-------------------------------
        log.debug(
            "test_createTransformsThenActivateInstrument: create L2_Density data_process start"
        )
        try:
            l2_density_all_data_process_id = self.dataprocessclient.create_data_process(
                ctd_L2_density_dprocdef_id, [ctd_parsed_data_product],
                {'output': ctd_l2_density_output_dp_id})
            self.dataprocessclient.activate_data_process(
                l2_density_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" % ex)

        log.debug(
            "test_createTransformsThenActivateInstrument: create L2_Density data_process return"
        )

        #-------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        print 'test_createTransformsThenActivateInstrument: Instrument agent instance obj: = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(
            'iaclient',
            name=inst_agent_instance_obj.agent_process_id,
            process=FakeProcess())
        print 'activate_instrument: got ia client %s' % self._ia_client
        log.debug(
            " test_createTransformsThenActivateInstrument:: got ia client %s",
            str(self._ia_client))

        #-------------------------------
        # Streaming
        #-------------------------------

        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        print retval
        log.debug(
            "test_createTransformsThenActivateInstrument:: initialize %s",
            str(retval))

        time.sleep(1)

        cmd = AgentCommand(command='go_active')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_createTransformsThenActivateInstrument:: go_active %s",
                  str(reply))
        time.sleep(1)

        cmd = AgentCommand(command='run')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_createTransformsThenActivateInstrument:: run %s",
                  str(reply))
        time.sleep(1)

        # Make sure the sampling rate and transmission are sane.
        params = {
            SBE37Parameter.NAVG: 1,
            SBE37Parameter.INTERVAL: 5,
            SBE37Parameter.TXREALTIME: True
        }
        self._ia_client.set_param(params)

        self._no_samples = 2

        # Begin streaming.
        cmd = AgentCommand(command='go_streaming')
        reply = self._ia_client.execute_agent(cmd)
        log.debug(
            "test_createTransformsThenActivateInstrument:: go_streaming %s",
            str(reply))
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)

        time.sleep(15)

        log.debug("test_activateInstrument: calling go_observatory")
        cmd = AgentCommand(command='go_observatory')
        reply = self._ia_client.execute(cmd)
        log.debug("test_activateInstrument: return from go_observatory   %s",
                  str(reply))
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        time.sleep(2)

        log.debug(
            "test_createTransformsThenActivateInstrument:: calling reset ")
        cmd = AgentCommand(command='reset')
        reply = self._ia_client.execute_agent(cmd)
        log.debug(
            "test_createTransformsThenActivateInstrument:: return from reset %s",
            str(reply))
        time.sleep(2)

        #-------------------------------
        # Sampling
        #-------------------------------
        #        cmd = AgentCommand(command='initialize')
        #        retval = self._ia_client.execute_agent(cmd)
        #        print retval
        #        log.debug("test_createTransformsThenActivateInstrument:: initialize %s", str(retval))
        #        time.sleep(2)
        #
        #        cmd = AgentCommand(command='go_active')
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activateInstrument: go_active %s", str(reply))
        #        time.sleep(2)
        #
        #        cmd = AgentCommand(command='run')
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activateInstrument: run %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling acquire_sample ")
        #        cmd = AgentCommand(command='acquire_sample')
        #        reply = self._ia_client.execute(cmd)
        #        log.debug("test_activateInstrument: return from acquire_sample %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling acquire_sample 2")
        #        cmd = AgentCommand(command='acquire_sample')
        #        reply = self._ia_client.execute(cmd)
        #        log.debug("test_activateInstrument: return from acquire_sample 2   %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling acquire_sample 3")
        #        cmd = AgentCommand(command='acquire_sample')
        #        reply = self._ia_client.execute(cmd)
        #        log.debug("test_activateInstrument: return from acquire_sample 3   %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling go_inactive ")
        #        cmd = AgentCommand(command='go_inactive')
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activateInstrument: return from go_inactive %s", str(reply))
        #        time.sleep(2)
        #
        #        log.debug("test_activateInstrument: calling reset ")
        #        cmd = AgentCommand(command='reset')
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activateInstrument: return from reset %s", str(reply))
        #        time.sleep(2)
        #
        #
        #        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)
        #
        #
        #        #get the dataset id of the ctd_parsed product from the dataproduct  ctd_parsed_data_product
        #        ctd_parsed_data_product_obj = self.dataproductclient.read_data_product(ctd_parsed_data_product)
        #        log.debug("test_createTransformsThenActivateInstrument:: ctd_parsed_data_product dataset id %s", str(ctd_parsed_data_product_obj.dataset_id))
        #
        #        # ask for the dataset bounds from the datasetmgmtsvc
        #        bounds = self.datasetclient.get_dataset_bounds(ctd_parsed_data_product_obj.dataset_id)
        #        log.debug("test_createTransformsThenActivateInstrument:: ctd_parsed_data_product dataset bounds %s", str(bounds))
        #        print 'activate_instrument: got dataset bounds %s', str(bounds)

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)
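The streaming section above drives the instrument agent through its state machine one AgentCommand at a time (initialize, go_active, run, go_streaming, go_observatory, reset), sleeping between steps. A small helper expressing the same sequence, assuming the AgentCommand and resource agent client used in this test:

def drive_agent(ia_client, commands=('initialize', 'go_active', 'run'),
                pause=1):
    """Execute a sequence of agent commands, pausing between steps."""
    for name in commands:
        cmd = AgentCommand(command=name)
        reply = ia_client.execute_agent(cmd)
        log.debug('%s -> %s', name, str(reply))
        time.sleep(pause)

# e.g. drive_agent(self._ia_client), then later
# drive_agent(self._ia_client, commands=('go_streaming',), pause=15)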
Example #11
    def on_start(self):

        log.debug("VizStreamProducer start")
        self.data_source_name = self.CFG.get('name')
        self.dataset = self.CFG.get('dataset')

        # create a pubsub client and a resource registry client
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)

        # Dummy instrument related clients
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)

        # Additional code for creating a dummy instrument
        """
        # Set up the preconditions. Look for an existing ingestion config
        while True:
            log.info("VisStreamLauncher:on_start: Waiting for an ingestion configuration to be available.")
            ingestion_cfgs, _ = self.rrclient.find_resources(RT.IngestionConfiguration, None, None, True)

            if len(ingestion_cfgs) > 0:
                break
            else:
                gevent.sleep(1)
        """

        # Check whether the data product already exists in the system
        # (e.g. when relaunching the code after a crash)
        dp_ids, _ = self.rrclient.find_resources(RT.DataProduct, None,
                                                 self.data_source_name, True)
        if len(dp_ids) > 0:
            data_product_id = dp_ids[0]
            print '>>>>>>>>>>>>> Found dp_id = ', data_product_id
        else:
            # Create InstrumentModel
            instModel_obj = IonObject(RT.InstrumentModel,
                                      name=self.data_source_name,
                                      description=self.data_source_name,
                                      model_label=self.data_source_name)
            instModel_id = self.imsclient.create_instrument_model(
                instModel_obj)

            # Create InstrumentDevice
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name=self.data_source_name,
                                       description=self.data_source_name,
                                       serial_number="12345")

            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, instDevice_id)

            # create a stream definition for the data from the ctd simulator
            ctd_stream_def = SBE37_CDM_stream_definition()
            ctd_stream_def_id = self.pubsubclient.create_stream_definition(
                container=ctd_stream_def)

            print 'Creating new CDM data product with a stream definition'
            dp_obj = IonObject(RT.DataProduct,
                               name=self.data_source_name,
                               description='ctd stream test')
            data_product_id = self.dpclient.create_data_product(
                dp_obj, ctd_stream_def_id)

            self.damsclient.assign_data_product(
                input_resource_id=instDevice_id,
                data_product_id=data_product_id)
            self.dpclient.activate_data_product_persistence(
                data_product_id=data_product_id,
                persist_data=True,
                persist_metadata=True)

            print '>>>>>>>>>>>> New dp_id = ', data_product_id

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id,
                                                   PRED.hasStream, None, True)

        if self.dataset == 'sinusoidal':
            pid = self.container.spawn_process(
                name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.sinusoidal_stream_publisher',
                cls='SinusoidalCtdPublisher',
                config={'process': {
                    'stream_id': stream_ids[0]
                }})
        else:
            pid = self.container.spawn_process(
                name='ctd_test.' + self.data_source_name,
                module='ion.processes.data.ctd_stream_publisher',
                cls='SimpleCtdPublisher',
                config={'process': {
                    'stream_id': stream_ids[0]
                }})
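The two spawn_process branches differ only in the publisher module and class; the config dict shape ({'process': {'stream_id': ...}}) is what the spawned publisher reads its stream id from. The same dispatch can be written as a table (module paths as in the example; the dict-based selection is just a sketch):

publishers = {
    'sinusoidal': ('ion.processes.data.sinusoidal_stream_publisher',
                   'SinusoidalCtdPublisher'),
}
default = ('ion.processes.data.ctd_stream_publisher', 'SimpleCtdPublisher')
module, cls = publishers.get(self.dataset, default)
pid = self.container.spawn_process(
    name='ctd_test.' + self.data_source_name,
    module=module, cls=cls,
    config={'process': {'stream_id': stream_ids[0]}})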
Example #12
    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datasetModel_obj = IonObject(RT.ExternalDatasetModel,
                                     name='ExampleDatasetModel',
                                     description="ExampleDatasetModel",
                                     datset_type="FibSeries")
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(
                datasetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" % ex)
        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s",
            str(datasetModel_id))

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(RT.ExternalDatasetAgent,
                                     name='datasetagent007',
                                     description="datasetagent007",
                                     handler_module=EDA_MOD,
                                     handler_class=EDA_CLS)
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(
                datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" % ex)
        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s",
            str(datasetAgent_id))

        # Create ExternalDataset
        log.debug(
            'TestExternalDatasetAgentMgmt: Create external dataset resource ')
        extDataset_obj = IonObject(RT.ExternalDataset,
                                   name='ExtDataset',
                                   description="ExtDataset")
        try:
            extDataset_id = self.damsclient.create_external_dataset(
                extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" %
                      ex)

        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ",
            str(extDataset_id))

        # register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(
            extDataset_id)

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            container=ctd_stream_def)

        log.debug(
            "TestExternalDatasetAgentMgmt: new Stream Definition id = %s",
            str(ctd_stream_def_id))

        log.debug(
            "TestExternalDatasetAgentMgmt: Creating new data product with a stream definition"
        )
        dp_obj = IonObject(RT.DataProduct,
                           name='eoi dataset data',
                           description=' stream test')
        try:
            data_product_id1 = self.dpclient.create_data_product(
                dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" % ex)
        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s",
                  str(data_product_id1))

        self.damsclient.assign_data_product(input_resource_id=extDataset_id,
                                            data_product_id=data_product_id1)

        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1,
            persist_data=True,
            persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s",
                  str(stream_ids))
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set('data', 'external_data')

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            # TODO: This should probably be a 'stream_config' dict with
            # stream_name:stream_id members
            'stream_id': stream_id,
            'data_producer_id': dproducer_id,
            # 'external_dataset_res': extDataset_obj,  # Not needed - retrieved by EDA based on resource_id
            'taxonomy': tx.dump(),  # TODO: Currently does not support sets
            'max_records': 4,
        }
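        # The TODO above suggests a 'stream_config' mapping instead of a single
        # stream_id; a minimal sketch of that shape (the 'parsed' stream name is
        # a hypothetical example, not part of the original API):
        #
        #   self.DVR_CONFIG['dh_cfg']['stream_config'] = {'parsed': stream_id}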

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            'driver_config': self.DVR_CONFIG,
            'stream_config': self._stream_config,
            'agent': {
                'resource_id': EDA_RESOURCE_ID
            },
            'test_mode': True
        }

        extDatasetAgentInstance_obj = IonObject(
            RT.ExternalDatasetAgentInstance,
            name='DatasetAgentInstance',
            description="DatasetAgentInstance",
            dataset_driver_config=self.DVR_CONFIG,
            dataset_agent_config=agent_config)
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(
            external_dataset_agent_instance=extDatasetAgentInstance_obj,
            external_dataset_agent_id=datasetAgent_id,
            external_dataset_id=extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s",
            str(extDatasetAgentInstance_obj))
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s",
            str(extDatasetAgentInstance_id))

        # Check that the instance is currently not active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(
            extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ",
            str(id), str(active))

        self.damsclient.start_external_dataset_agent_instance(
            extDatasetAgentInstance_id)

        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(
            extDatasetAgentInstance_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s",
            str(dataset_agent_instance_obj))

        # now the instance process should be active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(
            extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ",
            str(id), str(active))

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id,
                                               process=FakeProcess())
        print 'TestExternalDatasetAgentMgmt: got ia client %s' % self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset client %s",
                  str(self._dsa_client))

        #        cmd=AgentCommand(command='initialize')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='go_active')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='run')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        log.info('Send an unconstrained request for data (\'new data\')')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        cmd = AgentCommand(command='reset')
        #        _ = self._dsa_client.execute_agent(cmd)
        #        cmd = AgentCommand(command='get_current_state')
        #        retval = self._dsa_client.execute_agent(cmd)
        #        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
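        # The state walk below repeats the same execute/verify pair many times;
        # a small local helper (an editorial sketch, not part of the original
        # test) would express each transition in one line:
        def assert_transition(command, expected_state):
            self._dsa_client.execute_agent(AgentCommand(command=command))
            retval = self._dsa_client.execute_agent(
                AgentCommand(command='get_current_state'))
            self.assertEqual(retval.result, expected_state)
        # e.g.: assert_transition('initialize', InstrumentAgentState.INACTIVE)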
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='resume')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='reset')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.damsclient.stop_external_dataset_agent_instance(
            extDatasetAgentInstance_id)

    def test_activateInstrumentSample(self):

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel",
                                  model_label="SBE37IMModel")
        try:
            instModel_id = self.imsclient.create_instrument_model(
                instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" % ex)
        print 'new InstrumentModel id = ', instModel_id

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_module="ion.agents.instrument.instrument_agent",
            driver_class="InstrumentAgent")
        try:
            instAgent_id = self.imsclient.create_instrument_agent(
                instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" % ex)
        print 'new InstrumentAgent id = ', instAgent_id

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) '
        )
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        try:
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(
                instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" % ex)

        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

        driver_config = {
            'dvr_mod': 'ion.agents.instrument.drivers.sbe37.sbe37_driver',
            'dvr_cls': 'SBE37Driver',
            'workdir': '/tmp/',
        }

        instAgentInstance_obj = IonObject(
            RT.InstrumentAgentInstance,
            name='SBE37IMAgentInstance',
            description="SBE37IMAgentInstance",
            driver_config=driver_config,
            comms_device_address='sbe37-simulator.oceanobservatories.org',
            comms_device_port=4001,
            port_agent_work_dir='/tmp/',
            port_agent_delimeter=['<<', '>>'])
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            container=ctd_stream_def)

        print 'new Stream Definition id = ', ctd_stream_def_id

        print 'Creating new CDM data product with a stream definition'
        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test')
        try:
            data_product_id1 = self.dpclient.create_data_product(
                dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" % ex)
        print 'new dp_id = ', data_product_id1

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id1)

        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1,
            persist_data=True,
            persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        print 'Data product streams1 = ', stream_ids

        print 'Creating new RAW data product with a stream definition'
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubcli.create_stream_definition(
            container=raw_stream_def)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test')
        try:
            data_product_id2 = self.dpclient.create_data_product(
                dp_obj, raw_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" % ex)
        print 'new dp_id = ', data_product_id2

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id2)

        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2,
            persist_data=True,
            persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                   PRED.hasStream, None, True)
        print 'Data product streams2 = ', stream_ids

        self.imsclient.start_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        print 'Instrument agent instance obj: = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
        #self._ia_client = ResourceAgentClient('123xyz', name=inst_agent_instance_obj.agent_process_id,  process=FakeProcess())
        self._ia_client = ResourceAgentClient(instDevice_id,
                                              process=FakeProcess())
        print 'activate_instrument: got ia client %s' % self._ia_client
        log.debug("test_activateInstrumentSample: got ia client %s",
                  str(self._ia_client))

        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        print retval
        log.debug("test_activateInstrumentSample: initialize %s", str(retval))

        time.sleep(2)

        log.debug(
            "test_activateInstrumentSample: Sending go_active command (L4-CI-SA-RQ-334)"
        )
        cmd = AgentCommand(command='go_active')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrument: return value from go_active %s",
                  str(reply))
        time.sleep(2)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug(
            "test_activateInstrumentSample: current state after sending go_active command %s    (L4-CI-SA-RQ-334)",
            str(state))

        cmd = AgentCommand(command='run')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: run %s", str(reply))
        time.sleep(2)

        log.debug("test_activateInstrumentSample: calling acquire_sample ")
        cmd = AgentCommand(command='acquire_sample')
        reply = self._ia_client.execute(cmd)
        log.debug(
            "test_activateInstrumentSample: return from acquire_sample %s",
            str(reply))
        time.sleep(2)

        log.debug("test_activateInstrumentSample: calling acquire_sample 2")
        cmd = AgentCommand(command='acquire_sample')
        reply = self._ia_client.execute(cmd)
        log.debug(
            "test_activateInstrumentSample: return from acquire_sample 2   %s",
            str(reply))
        time.sleep(2)

        log.debug("test_activateInstrumentSample: calling acquire_sample 3")
        cmd = AgentCommand(command='acquire_sample')
        reply = self._ia_client.execute(cmd)
        log.debug(
            "test_activateInstrumentSample: return from acquire_sample 3   %s",
            str(reply))
        time.sleep(2)
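        # The three acquire_sample blocks above are identical; an equivalent
        # loop (editorial sketch, same behaviour):
        #
        #   for n in (1, 2, 3):
        #       reply = self._ia_client.execute(AgentCommand(command='acquire_sample'))
        #       log.debug("test_activateInstrumentSample: acquire_sample %d returned %s",
        #                 n, str(reply))
        #       time.sleep(2)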

        log.debug("test_activateInstrumentSample: calling reset ")
        cmd = AgentCommand(command='reset')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset %s",
                  str(reply))
        time.sleep(2)

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(
            instrument_agent_instance_id=instAgentInstance_id)
Example #14
class SimpleCtdPublisher(StandaloneProcess):
    def __init__(self, *args, **kwargs):
        super(SimpleCtdPublisher, self).__init__(*args, **kwargs)
        #@todo Init stuff

    outgoing_stream_def = SBE37_CDM_stream_definition()

    def on_start(self):

        log.warn('Entering On Start!!!')
        # Get the stream(s)
        stream_id = self.CFG.get_safe('process.stream_id', {})

        self.greenlet_queue = []

        # Stream creation is normally done in SA; for this demonstration, create one here if it is not provided...
        if not stream_id:

            pubsub_cli = PubsubManagementServiceClient(
                node=self.container.node)

            stream_def_id = pubsub_cli.create_stream_definition(
                name='Producer stream %s' % str(uuid4()),
                container=self.outgoing_stream_def)

            stream_id = pubsub_cli.create_stream(
                name='Example CTD Data',
                stream_definition_id=stream_def_id,
                original=True,
                encoding='ION R2')

        self.stream_publisher_registrar = StreamPublisherRegistrar(
            process=self, node=self.container.node)
        # Needed to get the originator's stream_id
        self.stream_id = stream_id

        self.publisher = self.stream_publisher_registrar.create_publisher(
            stream_id=stream_id)

        self.last_time = 0

        g = Greenlet(self._trigger_func, stream_id)
        log.debug('Starting publisher thread for simple ctd data.')
        g.start()
        log.warn('Publisher Greenlet started in "%s"' %
                 self.__class__.__name__)
        self.greenlet_queue.append(g)

    def on_quit(self):
        for greenlet in self.greenlet_queue:
            greenlet.kill()
        super(SimpleCtdPublisher, self).on_quit()

    def _trigger_func(self, stream_id):

        while True:

            length = random.randint(1, 20)

            ctd_packet = self._get_ctd_packet(stream_id, length)

            log.info('SimpleCtdPublisher sending %d values!' % length)
            self.publisher.publish(ctd_packet)

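            # NOTE (editorial): time.sleep only yields to other greenlets
            # because the pyon container monkey-patches the stdlib;
            # gevent.sleep(2.0) would make that intent explicit.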
            time.sleep(2.0)

    def _get_ctd_packet(self, stream_id, length):

        c = [random.uniform(0.0, 75.0) for i in xrange(length)]

        t = [random.uniform(-1.7, 21.0) for i in xrange(length)]

        p = [random.lognormvariate(1, 2) for i in xrange(length)]

        lat = [random.uniform(-90.0, 90.0) for i in xrange(length)]

        lon = [random.uniform(0.0, 360.0) for i in xrange(length)]

        tvar = [self.last_time + i for i in xrange(1, length + 1)]

        self.last_time = max(tvar)

        ctd_packet = ctd_stream_packet(stream_id=stream_id,
                                       c=c,
                                       t=t,
                                       p=p,
                                       lat=lat,
                                       lon=lon,
                                       time=tvar)

        return ctd_packet
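
# A consumer can unpack this publisher's granules with the matching stream
# definition; a minimal sketch (editorial; see the full subscriber wiring in
# the test_dm_integration example further below):
#
#   psd = PointSupplementStreamParser(
#       stream_definition=SimpleCtdPublisher.outgoing_stream_def,
#       stream_granule=ctd_packet)
#   temperature = psd.get_values('temperature')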
Example #15
class VizTransformGoogleDT(TransformFunction):

    """
    This class is used for  converting incoming data from CDM format to JSON style Google DataTables

    Note: One behaviour that this class is expected to achieve specifically is to determine if its supposed
        to work as a realtime transform (exists indefinitely and maintains a sliding window of data) or as
        a replay transform (one-shot).

        [2] This transform behaves as an instantaneous forwarder. There is no waiting for the entire stream
            to create the complete datatable. As the granules come in, they are translated to the datatable
            'components'. Components, because we are not creating the actual datatable in this code. That's the job
            of the viz service to put all the components of a datatable together in JSON format before sending it
            to the client

        [3] The time stamp in the incoming stream can't be converted to the datetime object here because
            the Raw stream definition only expects regular primitives (strings, floats, ints etc)

    """

    outgoing_stream_def = SBE37_RAW_stream_definition()
    #outgoing_stream_def = SBE37_CDM_stream_definition()
    incoming_stream_def = SBE37_CDM_stream_definition()

    def on_start(self):
        super(VizTransformGoogleDT,self).on_start()

        # init variables
        self.varTuple = []
        self.total_num_of_records_recvd = 0

        # init config. Need to move it to YAML or something
        self.realtime_window_size = 100
        self.max_google_dt_len = 1024 # Remove this once the decimation has been moved in to the incoming dp

        # Note some transform parameters


    def execute(self, granule):

        log.debug('(Google DT transform): Received Viz Data Packet' )

        self.dataDescription = []
        self.dataTableContent = []
        element_count_id = 0
        expected_range = []

        # NOTE : Detect somehow that this is a replay stream with a set number of expected granules. Based on this
        #       calculate the number of expected records and set the self.realtime_window_size bigger or equal to this
        #       number.



        psd = PointSupplementStreamParser(stream_definition=self.incoming_stream_def, stream_granule=granule)
        vardict = {}
        arrLen = None
        for varname in psd.list_field_names():
            vardict[varname] = psd.get_values(varname)
            arrLen = len(vardict[varname])


        # init the dataTable:
        # create the data description from the variables in the message
        self.dataDescription = [('time', 'datetime', 'time')]

        # split the data string to extract variable names
        for varname in psd.list_field_names():
            if varname == 'time':
                continue

            self.dataDescription.append((varname, 'number', varname))


        # Add the records to the datatable
        for i in xrange(arrLen):
            varTuple = []

            for varname, _, _ in self.dataDescription:
                val = float(vardict[varname][i])
                # Per note [3] in the docstring, time stays a float; it cannot be
                # converted here: varTuple.append(datetime.fromtimestamp(val))
                varTuple.append(val)

            # Append the row tuple to the data table
            self.dataTableContent.append(varTuple)

            # Maintain a sliding window for realtime transform processes
            if len(self.dataTableContent) > self.realtime_window_size:
                # always pop the first element till window size is what we want
                while len(self.dataTableContent) > self.realtime_window_size:
                    self.dataTableContent.pop(0)
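                # Equivalent one-liner (editorial sketch):
                #   del self.dataTableContent[:-self.realtime_window_size]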


        """ To Do : Do we need to figure out the how many granules have been received for a replay stream ??

        if not self.realtime_flag:
            # This is the historical view part. Make a note of how many records were received
            in_data_stream_id = self.incoming_stream_def.data_stream_id
            element_count_id = self.incoming_stream_def.identifiables[in_data_stream_id].element_count_id
            # From each granule you can check the constraint on the number of records
            expected_range = granule.identifiables[element_count_id].constraint.intervals[0]

            # The number of records in a given packet is:
            self.total_num_of_records_recvd += packet.identifiables[element_count_id].value

        """

        # define an output container of data


        # submit the partial datatable to the viz service
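        # NOTE (editorial): 'tx' is assumed to be a module-level TaxyTool that
        # defines the 'google_dt_components' taxonomy set; its definition sits
        # outside this excerpt.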
        rdt = RecordDictionaryTool(taxonomy=tx)

        # submit resulting table back using the out stream publisher. The data_product_id is the input dp_id
        # responsible for the incoming data
        msg = {"viz_product_type": "google_realtime_dt",
               "data_product_id": "FAKE_DATAPRODUCT_ID_0000",
               "data_table_description": self.dataDescription,
               "data_table_content": self.dataTableContent}

        rdt['google_dt_components'] = numpy.array([msg])

        log.debug('Google DT transform: Sending a granule')
        out_granule = build_granule(data_producer_id='google_dt_transform', taxonomy=tx, record_dictionary=rdt)

        #self.publish(out_granule)

        # clear the accumulated rows for future use
        self.varTuple[:] = []

        return out_granule
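
# The class docstring notes that assembling the final DataTable JSON is the
# viz service's job, not this transform's. A minimal sketch of that consumer
# side, assuming the third-party gviz_api package is available (the function
# name is illustrative, not part of the original service):
#
#   import gviz_api
#
#   def components_to_google_dt_json(msg):
#       table = gviz_api.DataTable(msg['data_table_description'])
#       table.LoadData(msg['data_table_content'])
#       return table.ToJSon()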
Example #16
    def test_dm_integration(self):
        '''
        test_salinity_transform
        Test full DM Services Integration
        '''
        cc = self.container
        assertions = self.assertTrue

        #-----------------------------
        # Copy below here to run as a script (don't forget the imports of course!)
        #-----------------------------

        # Create some service clients...
        pubsub_management_service = PubsubManagementServiceClient(node=cc.node)
        ingestion_management_service = IngestionManagementServiceClient(
            node=cc.node)
        dataset_management_service = DatasetManagementServiceClient(
            node=cc.node)
        data_retriever_service = DataRetrieverServiceClient(node=cc.node)
        transform_management_service = TransformManagementServiceClient(
            node=cc.node)
        process_dispatcher = ProcessDispatcherServiceClient(node=cc.node)

        # declare some handy variables

        datastore_name = 'test_dm_integration'

        ###
        ### In the beginning there were two stream definitions...
        ###
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = pubsub_management_service.create_stream_definition(
            container=ctd_stream_def, name='Simulated CTD data')

        # create a stream definition for the data from the salinity Transform
        sal_stream_def_id = pubsub_management_service.create_stream_definition(
            container=SalinityTransform.outgoing_stream_def,
            name='Scalar Salinity data stream')

        ###
        ### And two process definitions...
        ###
        # one for the ctd simulator...
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module': 'ion.processes.data.ctd_stream_publisher',
            'class': 'SimpleCtdPublisher'
        }

        ctd_sim_procdef_id = process_dispatcher.create_process_definition(
            process_definition=producer_definition)

        # one for the salinity transform
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module': 'ion.processes.data.transforms.ctd.ctd_L2_salinity',
            'class': 'SalinityTransform'
        }

        salinity_transform_procdef_id = process_dispatcher.create_process_definition(
            process_definition=producer_definition)

        #---------------------------
        # Set up ingestion - this is an operator concern - not done by SA in a deployed system
        #---------------------------
        # Configure ingestion using eight workers, ingesting to test_dm_integration datastore with the SCIDATA profile
        log.debug('Calling create_ingestion_configuration')
        ingestion_configuration_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id='science_data',
            couch_storage=CouchStorage(datastore_name=datastore_name,
                                       datastore_profile='SCIDATA'),
            number_of_workers=1)
        #
        ingestion_management_service.activate_ingestion_configuration(
            ingestion_configuration_id=ingestion_configuration_id)

        #---------------------------
        # Set up the producer (CTD Simulator)
        #---------------------------

        # Create the stream
        ctd_stream_id = pubsub_management_service.create_stream(
            stream_definition_id=ctd_stream_def_id)

        # Set up the datasets
        ctd_dataset_id = dataset_management_service.create_dataset(
            stream_id=ctd_stream_id,
            datastore_name=datastore_name,
            view_name='datasets/stream_join_granule')

        # Configure ingestion of this dataset
        ctd_dataset_config_id = ingestion_management_service.create_dataset_configuration(
            dataset_id=ctd_dataset_id,
            archive_data=True,
            archive_metadata=True,
            ingestion_configuration_id=
            ingestion_configuration_id,  # you need to know the ingestion configuration id!
        )
        # Hold onto ctd_dataset_config_id if you want to stop/start ingestion of that dataset by the ingestion service

        #---------------------------
        # Set up the salinity transform
        #---------------------------

        # Create the stream
        sal_stream_id = pubsub_management_service.create_stream(
            stream_definition_id=sal_stream_def_id)

        # Set up the datasets
        sal_dataset_id = dataset_management_service.create_dataset(
            stream_id=sal_stream_id,
            datastore_name=datastore_name,
            view_name='datasets/stream_join_granule')

        # Configure ingestion of the salinity as a dataset
        sal_dataset_config_id = ingestion_management_service.create_dataset_configuration(
            dataset_id=sal_dataset_id,
            archive_data=True,
            archive_metadata=True,
            ingestion_configuration_id=
            ingestion_configuration_id,  # you need to know the ingestion configuration id!
        )
        # Hold onto sal_dataset_config_id if you want to stop/start ingestion of that dataset by the ingestion service

        # Create a subscription as input to the transform
        sal_transform_input_subscription_id = pubsub_management_service.create_subscription(
            query=StreamQuery(stream_ids=[
                ctd_stream_id,
            ]),
            exchange_name='salinity_transform_input'
        )  # how do we make these names??? i.e. Should they be anonymous?

        # create the salinity transform
        sal_transform_id = transform_management_service.create_transform(
            name='example salinity transform',
            in_subscription_id=sal_transform_input_subscription_id,
            out_streams={
                'output': sal_stream_id,
            },
            process_definition_id=salinity_transform_procdef_id,
            # no configuration needed at this time...
        )
        # start the transform - for a test case it makes sense to do it before starting the producer but it is not required
        transform_management_service.activate_transform(
            transform_id=sal_transform_id)

        # Start the ctd simulator to produce some data
        configuration = {
            'process': {
                'stream_id': ctd_stream_id,
            }
        }
        ctd_sim_pid = process_dispatcher.schedule_process(
            process_definition_id=ctd_sim_procdef_id,
            configuration=configuration)

        ###
        ### Make a subscriber in the test to listen for salinity data
        ###
        salinity_subscription_id = pubsub_management_service.create_subscription(
            query=StreamQuery([
                sal_stream_id,
            ]),
            exchange_name='salinity_test',
            name="test salinity subscription",
        )

        pid = cc.spawn_process(name='dummy_process_for_test',
                               module='pyon.ion.process',
                               cls='SimpleProcess',
                               config={})
        dummy_process = cc.proc_manager.procs[pid]

        subscriber_registrar = StreamSubscriberRegistrar(process=dummy_process,
                                                         node=cc.node)

        result = gevent.event.AsyncResult()
        results = []

        def message_received(message, headers):
            log.warn('Salinity data received!')
            results.append(message)
            if len(results) > 3:
                result.set(True)

        subscriber = subscriber_registrar.create_subscriber(
            exchange_name='salinity_test', callback=message_received)
        subscriber.start()

        # after the queue has been created it is safe to activate the subscription
        pubsub_management_service.activate_subscription(
            subscription_id=salinity_subscription_id)

        # Assert that we have received data
        assertions(result.get(timeout=10))

        # stop the flow parse the messages...
        process_dispatcher.cancel_process(
            ctd_sim_pid
        )  # kill the ctd simulator process - that is enough data

        for message in results:

            psd = PointSupplementStreamParser(
                stream_definition=SalinityTransform.outgoing_stream_def,
                stream_granule=message)

            # Test the handy info method for the names of fields in the stream def
            assertions('salinity' in psd.list_field_names())

            # you have to know the name of the coverage in stream def
            salinity = psd.get_values('salinity')

            import numpy

            assertions(isinstance(salinity, numpy.ndarray))

            assertions(numpy.nanmin(salinity) >
                       0.0)  # salinity should always be greater than 0

    def test_replay_integration(self):
        '''
        test_replay_integration
        '''
        import numpy as np
        # Keep this import; it's used in the vector comparison below even though
        # PyCharm flags it as unused.

        cc = self.container
        XP = self.XP
        assertions = self.assertTrue

        ### Every thing below here can be run as a script:
        log.debug('Got it')

        pubsub_management_service = PubsubManagementServiceClient(node=cc.node)
        ingestion_management_service = IngestionManagementServiceClient(
            node=cc.node)
        dataset_management_service = DatasetManagementServiceClient(
            node=cc.node)
        data_retriever_service = DataRetrieverServiceClient(node=cc.node)

        datastore_name = 'dm_test_replay_integration'

        producer = Publisher(name=(XP, 'stream producer'))

        ingestion_configuration_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id=XP,
            couch_storage=CouchStorage(datastore_name=datastore_name,
                                       datastore_profile='SCIDATA'),
            hdf_storage=HdfStorage(),
            number_of_workers=1)

        ingestion_management_service.activate_ingestion_configuration(
            ingestion_configuration_id=ingestion_configuration_id)

        definition = SBE37_CDM_stream_definition()
        data_stream_id = definition.data_stream_id
        encoding_id = definition.identifiables[data_stream_id].encoding_id
        element_count_id = definition.identifiables[
            data_stream_id].element_count_id

        stream_def_id = pubsub_management_service.create_stream_definition(
            container=definition)
        stream_id = pubsub_management_service.create_stream(
            stream_definition_id=stream_def_id)

        dataset_id = dataset_management_service.create_dataset(
            stream_id=stream_id,
            datastore_name=datastore_name,
            view_name='datasets/dataset_by_id')
        ingestion_management_service.create_dataset_configuration(
            dataset_id=dataset_id,
            archive_data=True,
            archive_metadata=True,
            ingestion_configuration_id=ingestion_configuration_id)
        definition.stream_resource_id = stream_id

        packet = _create_packet(definition)
        input_file = FileSystem.mktemp()
        input_file.write(packet.identifiables[data_stream_id].values)
        input_file_path = input_file.name
        input_file.close()

        fields = [
            'conductivity', 'height', 'latitude', 'longitude', 'pressure',
            'temperature', 'time'
        ]

        input_vectors = acquire_data([input_file_path], fields, 2).next()

        producer.publish(msg=packet, to_name=(XP, '%s.data' % stream_id))

        replay_id, replay_stream_id = data_retriever_service.define_replay(
            dataset_id)
        ar = gevent.event.AsyncResult()

        def sub_listen(msg, headers):

            assertions(isinstance(msg, StreamGranuleContainer),
                       'replayed message is not a granule.')
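            # The granule carries a SHA-1 of its packed HDF payload; recomputing
            # the digest verifies the bytes survived ingestion and replay intact.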
            hdf_string = msg.identifiables[data_stream_id].values
            sha1 = hashlib.sha1(hdf_string).hexdigest().upper()
            assertions(sha1 == msg.identifiables[encoding_id].sha1,
                       'Checksum failed.')
            assertions(
                msg.identifiables[element_count_id].value == 1,
                'record replay count is incorrect %d.' %
                msg.identifiables[element_count_id].value)
            output_file = FileSystem.mktemp()
            output_file.write(msg.identifiables[data_stream_id].values)
            output_file_path = output_file.name
            output_file.close()
            output_vectors = acquire_data([output_file_path], fields, 2).next()
            for field in fields:
                comparison = (input_vectors[field]['values'] ==
                              output_vectors[field]['values'])
                assertions(
                    comparison.all(), 'vector mismatch: %s vs %s' %
                    (input_vectors[field]['values'],
                     output_vectors[field]['values']))
            FileSystem.unlink(output_file_path)
            ar.set(True)

        subscriber = Subscriber(name=(XP, 'replay listener'),
                                callback=sub_listen)

        g = gevent.Greenlet(subscriber.listen,
                            binding='%s.data' % replay_stream_id)
        g.start()

        data_retriever_service.start_replay(replay_id)

        ar.get(timeout=10)

        FileSystem.unlink(input_file_path)
Example #18
class ctd_L0_all(TransformDataProcess):
    """Model for a TransformDataProcess

    """

    # Make the stream definitions of the transform class attributes
    incoming_stream_def = SBE37_CDM_stream_definition()

    outgoing_stream_pressure = L0_pressure_stream_definition()
    outgoing_stream_temperature = L0_temperature_stream_definition()
    outgoing_stream_conductivity = L0_conductivity_stream_definition()

    # Retrieve the id of the OUTPUT stream from the out Data Product for each of the three output streams
    #    stream_ids, _ = self.clients.resource_registry.find_objects(out_data_product_id, PRED.hasStream, None, True)
    #
    #    log.debug("DataProcessManagementService:create_data_process retrieve out data prod streams: %s", str(stream_ids))
    #    if not stream_ids:
    #        raise NotFound("No Stream created for output Data Product " + str(out_data_product_id))
    #    if len(stream_ids) != 1:
    #        raise BadRequest("Data Product should only have ONE stream at this time" + str(out_data_product_id))
    #    self.output_stream_dict[name] = stream_ids[0]

    def process(self, packet):
        """Processes incoming data!!!!
        """

        # Use the PointSupplementStreamParser to pull data from a granule
        psd = PointSupplementStreamParser(
            stream_definition=self.incoming_stream_def, stream_granule=packet)

        conductivity = psd.get_values('conductivity')
        pressure = psd.get_values('pressure')
        temperature = psd.get_values('temperature')

        longitude = psd.get_values('longitude')
        latitude = psd.get_values('latitude')
        height = psd.get_values('height')
        time = psd.get_values('time')

        log.warn('Got conductivity: %s' % str(conductivity))
        log.warn('Got pressure: %s' % str(pressure))
        log.warn('Got temperature: %s' % str(temperature))

        # do L0 scaling here.....

        # Use the constructor to put data into a granule

        psc_conductivity = PointSupplementConstructor(
            point_definition=self.outgoing_stream_conductivity,
            stream_id=self.streams['conductivity'])

        psc_pressure = PointSupplementConstructor(
            point_definition=self.outgoing_stream_pressure,
            stream_id=self.streams['pressure'])

        psc_temperature = PointSupplementConstructor(
            point_definition=self.outgoing_stream_temperature,
            stream_id=self.streams['temperature'])

        ### The stream id is part of the metadata which must go in each stream granule - this is awkward to do at the
        ### application level like this!

        for i in xrange(len(conductivity)):
            point_id = psc_conductivity.add_point(time=time[i],
                                                  location=(longitude[i],
                                                            latitude[i],
                                                            height[i]))
            psc_conductivity.add_scalar_point_coverage(
                point_id=point_id,
                coverage_id='conductivity',
                value=conductivity[i])
        self.conductivity.publish(psc_conductivity.close_stream_granule())

        for i in xrange(len(pressure)):
            point_id = psc_pressure.add_point(time=time[i],
                                              location=(longitude[i],
                                                        latitude[i],
                                                        height[i]))
            psc_pressure.add_scalar_point_coverage(point_id=point_id,
                                                   coverage_id='pressure',
                                                   value=pressure[i])
        self.pressure.publish(psc_pressure.close_stream_granule())

        for i in xrange(len(temperature)):
            point_id = psc_temperature.add_point(time=time[i],
                                                 location=(longitude[i],
                                                           latitude[i],
                                                           height[i]))
            psc_temperature.add_scalar_point_coverage(
                point_id=point_id,
                coverage_id='temperature',
                value=temperature[i])
        self.temperature.publish(psc_temperature.close_stream_granule())
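
        # The three loops above differ only in the coverage name; a local helper
        # (editorial sketch, same behaviour) could collapse them:
        #
        #   def publish_coverage(psc, name, values, publisher):
        #       for i in xrange(len(values)):
        #           pid = psc.add_point(time=time[i],
        #                               location=(longitude[i], latitude[i], height[i]))
        #           psc.add_scalar_point_coverage(point_id=pid, coverage_id=name,
        #                                         value=values[i])
        #       publisher.publish(psc.close_stream_granule())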

        return

    def test_lookupTableProcessing(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel", model_label="SBE37IMModel" )
        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" %ex)
        print 'test_lookupTableProcessing: new InstrumentModel id = ', instModel_id

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="ion.agents.instrument.instrument_agent", driver_class="InstrumentAgent" )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" %ex)
        print 'test_lookupTableProcessing: new InstrumentAgent id = ', instAgent_id

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice and attachment for lookup table
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" %ex)

        print 'test_lookupTableProcessing: new InstrumentDevice id = ', instDevice_id

        contents = "this is the lookup table  contents, replace with a file..."
        att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII)
        deviceAttachment = self.rrclient.create_attachment(instDevice_id, att)
        print 'test_lookupTableProcessing: InstrumentDevice attachment id = ', deviceAttachment

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------

        driver_config = {
            'dvr_mod' : 'ion.agents.instrument.drivers.sbe37.sbe37_driver',
            'dvr_cls' : 'SBE37Driver',
            'workdir' : '/tmp/',
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", driver_config = driver_config,
                                          comms_device_address='sbe37-simulator.oceanobservatories.org',   comms_device_port=4001,  port_agent_work_dir='/tmp/', port_agent_delimeter=['<<','>>'] )
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)



        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=ctd_stream_def)

        print 'TestDataProcessWithLookupTable: new Stream Definition id = ', ctd_stream_def_id

        print 'Creating new CDM data product with a stream definition'
        dp_obj = IonObject(RT.DataProduct,name='ctd_parsed',description='ctd stream test')
        try:
            ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" %ex)
        print 'new ctd_parsed_data_product_id = ', ctd_parsed_data_product

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product, persist_data=True, persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)
        print 'TestDataProcessWithLookupTable: Data product streams1 = ', stream_ids

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        print 'TestDataProcessWithLookupTable: Creating new RAW data product with a stream definition'
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubclient.create_stream_definition(container=raw_stream_def)

        dp_obj = IonObject(RT.DataProduct,name='ctd_raw',description='raw stream test')
        try:
            ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)
        except BadRequest as ex:
            self.fail("failed to create new data product: %s" %ex)
        print 'new ctd_raw_data_product_id = ', ctd_raw_data_product

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_raw_data_product, persist_data=True, persist_metadata=True)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)
        print 'Data product streams2 = ', stream_ids

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProcessWithLookupTable: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all',
                            process_source='some_source_reference')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" %ex)

        contents = "this is the lookup table  contents for L0 Conductivity - Temperature - Pressure: Data Process Definition, replace with a file..."
        att = IonObject(RT.Attachment, name='processDefinitionLookupTable',content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII)
        processDefinitionAttachment = self.rrclient.create_attachment(ctd_L0_all_dprocdef_id, att)
        log.debug("TestDataProcessWithLookupTable:test_createTransformsThenActivateInstrument: InstrumentDevice attachment id %s", str(processDefinitionAttachment) )
        processDefinitionAttachment_obj = self.rrclient.read(processDefinitionAttachment)
        log.debug("TestDataProcessWithLookupTable:test_createTransformsThenActivateInstrument: InstrumentDevice attachment obj %s", str(processDefinitionAttachment_obj) )

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_conductivity, name='L0_Conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id )

        outgoing_stream_l0_pressure = L0_pressure_stream_definition()
        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_pressure, name='L0_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id )

        outgoing_stream_l0_temperature = L0_temperature_stream_definition()
        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_temperature, name='L0_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id )


        self.output_products={}
        log.debug("TestDataProcessWithLookupTable: create output data product L0 conductivity")
        ctd_l0_conductivity_output_dp_obj = IonObject(RT.DataProduct, name='L0_Conductivity',description='transform output conductivity')
        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id, persist_data=True, persist_metadata=True)


        log.debug("TestDataProcessWithLookupTable: create output data product L0 pressure")
        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct, name='L0_Pressure',description='transform output pressure')
        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id, persist_data=True, persist_metadata=True)

        log.debug("TestDataProcessWithLookupTable: create output data product L0 temperature")
        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct, name='L0_Temperature',description='transform output temperature')
        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id, persist_data=True, persist_metadata=True)


        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProcessWithLookupTable: create L0 all data_process start")
        try:
            in_prods = []
            in_prods.append(ctd_parsed_data_product)
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, in_prods, self.output_products)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("TestDataProcessWithLookupTable: create L0 all data_process return")

        contents = "this is the lookup table  contents for L0 Conductivity - Temperature - Pressure: Data Process , replace with a file..."
        att = IonObject(RT.Attachment, name='processLookupTable',content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII)
        processAttachment = self.rrclient.create_attachment(ctd_l0_all_data_process_id, att)
        print 'TestDataProcessWithLookupTable: DataProcess attachment id = ', processAttachment