def load_data_product(self):
        dset_i = 0
        dataset_management      = DatasetManagementServiceClient()
        pubsub_management       = PubsubManagementServiceClient()
        data_product_management = DataProductManagementServiceClient()
        resource_registry       = self.container.instance.resource_registry

        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = DataProduct(
            name='instrument_data_product_%i' % dset_i,
            description='ctd stream test',
            processing_level_code='Parsed_Canonical',
            temporal_domain = tdom,
            spatial_domain = sdom)
        pdict_id = dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id = pubsub_management.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(pubsub_management.delete_stream_definition, stream_def_id)
        data_product_id = data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
        self.addCleanup(data_product_management.delete_data_product, data_product_id)
        data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(data_product_management.suspend_data_product_persistence, data_product_id)

        stream_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasStream', id_only=True)
        stream_id = stream_ids[0]
        route = pubsub_management.read_stream_route(stream_id)

        dataset_ids, assocs = resource_registry.find_objects(subject=data_product_id, predicate='hasDataset', id_only=True)
        dataset_id = dataset_ids[0]

        return data_product_id, stream_id, route, stream_def_id, dataset_id
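    # Hedged usage sketch (not part of the original suite): publish one granule
    # on the stream returned by the helper above and block until it is ingested,
    # reusing the RecordDictionaryTool, DatasetMonitor, and
    # StandaloneStreamPublisher patterns shown elsewhere in these examples.
    def publish_and_wait(self):
        data_product_id, stream_id, route, stream_def_id, dataset_id = self.load_data_product()
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = numpy.arange(10)         # 'time' exists in ctd_parsed_param_dict
        monitor = DatasetMonitor(dataset_id)   # watch the dataset for ingestion
        self.addCleanup(monitor.stop)
        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish(rdt.to_granule())
        self.assertTrue(monitor.event.wait(10))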
    def test_ownership_searching(self):
        # Create two data products so that the search has competition: one is parsed
        # (with conductivity as a parameter) and the other is raw
        dp = DataProduct(name='example dataproduct')
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        tdom, sdom = time_series_domain()
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()
        dp_id = self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_raw_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd raw', parameter_dictionary_id=pdict_id)
        dp = DataProduct(name='WRONG')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()
        self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        parameter_search = 'search "name" is "conductivity" from "resources_index"'
        results = self.poll(9, self.discovery.parse, parameter_search)
        param_id = results[0]['_id']

        data_product_search = 'search "name" is "*" from "data_products_index" and has "%s"' % param_id
        results = self.poll(9, self.discovery.parse, data_product_search)
        log.debug('data product search results: %s', results)
        self.assertEquals(results[0]['_id'], dp_id)
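    # Hedged sketch of the poll helper used above: retry a callable until it
    # returns a truthy result, yielding between attempts. The real helper may
    # differ in its backoff and failure behavior.
    def poll(self, tries, callback, *args, **kwargs):
        for i in xrange(tries):
            results = callback(*args, **kwargs)
            if results:
                return results
            gevent.sleep(1)     # yield to other greenlets before retrying
        return None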
    def setup_resource(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='example')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt, 100)
        ph.publish_rdt_to_data_product(data_product_id, rdt)

        # Yield to other greenlets, had an issue with connectivity
        gevent.sleep(1)

        self.offering_id = dataset_id
    def _create_param_dicts(self):
        tdom, sdom = time_series_domain()

        self.sdom = sdom.dump()
        self.tdom = tdom.dump()

        self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
Example #5
    def test_data_product_subscription(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        tdom, sdom = time_series_domain()
        dp = DataProduct(name='ctd parsed')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(data_product=dp, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

        subscription_id = self.pubsub_management.create_subscription('validator', data_product_ids=[data_product_id])
        self.addCleanup(self.pubsub_management.delete_subscription, subscription_id)

        validated = Event()
        def validation(msg, route, stream_id):
            validated.set()

        stream_ids, _ = self.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasStream, id_only=True)
        dp_stream_id = stream_ids.pop()

        validator = StandaloneStreamSubscriber('validator', callback=validation)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_management.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_management.deactivate_subscription, subscription_id)

        route = self.pubsub_management.read_stream_route(dp_stream_id)

        publisher = StandaloneStreamPublisher(dp_stream_id, route)
        publisher.publish('hi')
        self.assertTrue(validated.wait(10))
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Instantiate a process to represent the test
        process = TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(
            node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(
            node=self.container.node, process=process)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client,
                                  self.addCleanup)

        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
 def _make_dataset(self):
     tdom, sdom = time_series_domain()
     sdom = sdom.dump()
     tdom = tdom.dump()
     parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
     dataset_id = self.dataset_management.create_dataset('test_dataset', parameter_dictionary_id=parameter_dict_id, spatial_domain=sdom, temporal_domain=tdom)
     return dataset_id
    def test_granule_publish(self):
        log.debug("test_granule_publish ")
        self.loggerpids = []


        #retrieve the param dict from the repository
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        stream_definition_id = self.pubsubclient.create_stream_definition('parsed stream', parameter_dictionary_id=pdict_id)


        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
            name=str(uuid.uuid4()),
            description='ctd stream test',
            temporal_domain = tdom.dump(),
            spatial_domain = sdom.dump())

        data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=stream_definition_id)


        # Retrieve the id of the output stream of the output Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'test_granule_publish: Data product streams1 = %s', stream_ids)

        pid = self.create_logger('ctd_parsed', stream_ids[0] )
        self.loggerpids.append(pid)

        rdt = RecordDictionaryTool(stream_definition_id=stream_definition_id)

        #create the publisher from the stream route
        stream_route = self.pubsubclient.read_stream_route(stream_ids[0])
        publisher = StandaloneStreamPublisher(stream_ids[0], stream_route)

        # this is one sample from the ctd driver
        tomato = {"driver_timestamp": 3555971105.1268806, "instrument_id": "ABC-123", "pkt_format_id": "JSON_Data", "pkt_version": 1, "preferred_timestamp": "driver_timestamp", "quality_flag": "ok", "stream_name": "parsed", "values": [{"value": 22.9304, "value_id": "temp"}, {"value": 51.57381, "value_id": "conductivity"}, {"value": 915.551, "value_id": "pressure"}]}

        for value in tomato['values']:
            log.debug("test_granule_publish: Looping tomato values  key: %s    val: %s ", str(value['value_id']), str(value['value']))

            if value['value_id'] in rdt:
                rdt[value['value_id']] = numpy.array( [ value['value'] ] )
                log.debug("test_granule_publish: Added data item  %s  val: %s ", str(value['value_id']), str(value['value']) )

        g = rdt.to_granule()

        publisher.publish(g)

        gevent.sleep(3)

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)
  
        #--------------------------------------------------------------------------------
        # Cleanup data products
        #--------------------------------------------------------------------------------
        dp_ids, _ = self.rrclient.find_resources(restype=RT.DataProduct, id_only=True)

        for dp_id in dp_ids:
            self.dataproductclient.delete_data_product(dp_id)
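    # Hedged refactoring sketch: the sample-to-RDT mapping above generalizes
    # into a small helper, assuming the same driver packet layout (a 'values'
    # list of {'value_id': ..., 'value': ...} dicts).
    def _sample_to_rdt(self, sample, rdt):
        for value in sample['values']:
            # skip value_ids the stream definition does not carry
            if value['value_id'] in rdt:
                rdt[value['value_id']] = numpy.array([value['value']])
        return rdt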
    def test_pydap(self):
        if not CFG.get_safe('bootstrap.use_pydap',False):
            raise unittest.SkipTest('PyDAP is off (bootstrap.use_pydap)')
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='example')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt,10)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.event.wait(10))


        gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity

        pydap_host = CFG.get_safe('server.pydap.host','localhost')
        pydap_port = CFG.get_safe('server.pydap.port',8001)
        url = 'http://%s:%s/%s' %(pydap_host, pydap_port, dataset_id)

        ds = open_url(url)
        np.testing.assert_array_equal(ds['time'][:], np.arange(10))
        untested = []
        for k,v in rdt.iteritems():
            if k==rdt.temporal_parameter:
                continue
            context = rdt.context(k)
            if isinstance(context.param_type, QuantityType):
                np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
            elif isinstance(context.param_type, ArrayType):
                values = np.empty(rdt[k].shape, dtype='O')
                for i,obj in enumerate(rdt[k]):
                    values[i] = str(obj)
                np.testing.assert_array_equal(ds[k][k][:][0], values)
            elif isinstance(context.param_type, ConstantType):
                np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
            elif isinstance(context.param_type, CategoryType):
                np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
            else:
                untested.append('%s (%s)' % (k,context.param_type))
        if untested:
            raise AssertionError('Untested parameters: %s' % untested)
Example #10
    def create_ctd_input_stream_and_data_product(self,
                                                 data_product_name='ctd_parsed'
                                                 ):

        cc = self.container
        assertions = self.assertTrue

        #-------------------------------
        # Create CTD Parsed as the initial data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_pdict_id = self.datasetclient.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=ctd_pdict_id)

        log.debug('Creating new CDM data product with a stream definition')

        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
                           name=data_product_name,
                           description='ctd stream test',
                           temporal_domain=tdom.dump(),
                           spatial_domain=sdom.dump())

        ctd_parsed_data_product_id = self.dataproductclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        log.debug('new ctd_parsed_data_product_id = %s' %
                  ctd_parsed_data_product_id)

        #Only ever need one device for testing purposes.
        instDevice_obj, _ = self.rrclient.find_resources(
            restype=RT.InstrumentDevice, name='SBE37IMDevice')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name='SBE37IMDevice',
                                       description="SBE37IMDevice",
                                       serial_number="12345")
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_parsed_data_product_id)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_parsed_data_product_id)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_id,
                                                   PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        ctd_stream_id = stream_ids[0]

        return ctd_stream_id, ctd_parsed_data_product_id
    def test_derived_data_product(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='Instrument DP', temporal_domain=tdom.dump(), spatial_domain=sdom.dump())
        dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id)
        self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)


        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
        dataset_id = dataset_ids[0]
        
        # Make the derived data product
        simple_stream_def_id = self.pubsubcli.create_stream_definition(name='TEMPWAT stream def', parameter_dictionary_id=pdict_id, available_fields=['time','temp'])
        tempwat_dp = DataProduct(name='TEMPWAT')
        tempwat_dp_id = self.dpsc_cli.create_data_product(tempwat_dp, stream_definition_id=simple_stream_def_id, parent_data_product_id=dp_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)
        self.dpsc_cli.activate_data_product_persistence(tempwat_dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, tempwat_dp_id)
        # Check that the streams associated with the data product are persisted
        stream_ids, _ =  self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)
        rdt['pressure'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id,route)
        
        dataset_modified = Event()
        def cb(*args, **kwargs):
            dataset_modified.set()
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())

        self.assertTrue(dataset_modified.wait(30))

        tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id, PRED.hasDataset, id_only=True)
        tempwat_dataset_id = tempwat_dataset_ids[0]
        granule = self.data_retriever.retrieve(tempwat_dataset_id, delivery_format=simple_stream_def_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(20))
        self.assertEquals(set(rdt.fields), set(['time','temp']))
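        # Hedged follow-up sketch (not in the original test): retrieving the
        # parent dataset with the full CTD stream definition should expose all
        # three published fields, in contrast to the TEMPWAT subset above.
        granule = self.data_retriever.retrieve(dataset_id, delivery_format=ctd_stream_def_id)
        parent_rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assertTrue(set(['time', 'temp', 'pressure']).issubset(set(parent_rdt.fields)))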
 def _create_coverage(self, dataset_id, description, parameter_dict):
     #file_root = FileSystem.get_url(FS.CACHE,'datasets')
     temporal_domain, spatial_domain = time_series_domain()
     pdict = ParameterDictionary.load(parameter_dict)
     scov = self._create_simplex_coverage(dataset_id, pdict, spatial_domain, temporal_domain, self.inline_data_writes)
     #vcov = ViewCoverage(file_root, dataset_id, description or dataset_id, reference_coverage_location=scov.persistence_dir)
     scov.close()
     return scov
    def _create_param_dicts(self):
        tdom, sdom = time_series_domain()

        self.sdom = sdom.dump()
        self.tdom = tdom.dump()

        self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
 def _make_dataset(self):
     tdom, sdom = time_series_domain()
     sdom = sdom.dump()
     tdom = tdom.dump()
     parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
     dataset = Dataset(name='test_dataset')
     dataset_id = self.dataset_management.create_dataset(dataset, parameter_dictionary_id=parameter_dict_id)
     return dataset_id
Example #15
 def _create_coverage(self, dataset_id, description, parameter_dict):
     #file_root = FileSystem.get_url(FS.CACHE,'datasets')
     temporal_domain, spatial_domain = time_series_domain()
     pdict = ParameterDictionary.load(parameter_dict)
     scov = self._create_simplex_coverage(dataset_id, pdict, spatial_domain,
                                          temporal_domain)
     #vcov = ViewCoverage(file_root, dataset_id, description or dataset_id, reference_coverage_location=scov.persistence_dir)
     scov.close()
     return scov
Example #16
    def create_transform_process(self, data_process_definition_id,
                                 data_process_input_dp_id, stream_name):

        data_process_definition = self.rrclient.read(
            data_process_definition_id)

        # Find the link between the output Stream Definition resource and the Data Process Definition resource
        stream_ids, _ = self.rrclient.find_objects(data_process_definition._id,
                                                   PRED.hasStreamDefinition,
                                                   RT.StreamDefinition,
                                                   id_only=True)
        if not stream_ids:
            raise Inconsistent(
                "The data process definition %s is missing an association to an output stream definition"
                % data_process_definition._id)
        process_output_stream_def_id = stream_ids[0]

        # Use the data process definition's name for the output data product
        data_process_name = data_process_definition.name

        # Create the output data product of the transform

        tdom, sdom = time_series_domain()

        transform_dp_obj = IonObject(
            RT.DataProduct,
            name=data_process_name,
            description=data_process_definition.description,
            temporal_domain=tdom.dump(),
            spatial_domain=sdom.dump())

        transform_dp_id = self.dataproductclient.create_data_product(
            transform_dp_obj, process_output_stream_def_id)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=transform_dp_id)

        # The transform's output data product id
        output_data_product_id = transform_dp_id

        # Create the  transform data process
        log.debug("create data_process and start it")
        data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition._id, [data_process_input_dp_id],
            {stream_name: transform_dp_id})
        self.dataprocessclient.activate_data_process(data_process_id)

        #Find the id of the output data stream
        stream_ids, _ = self.rrclient.find_objects(transform_dp_id,
                                                   PRED.hasStream, None, True)
        if not stream_ids:
            raise Inconsistent(
                "The data process %s is missing an association to an output stream"
                % data_process_id)

        return data_process_id, output_data_product_id
Example #17
 def _make_dataset(self):
     tdom, sdom = time_series_domain()
     sdom = sdom.dump()
     tdom = tdom.dump()
     parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name(
         'ctd_parsed_param_dict', id_only=True)
     dataset = Dataset(name='test_dataset')
     dataset_id = self.dataset_management.create_dataset(
         dataset, parameter_dictionary_id=parameter_dict_id)
     return dataset_id
Example #18
    def create_data_product_obj(self):

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        # creates an IonObject of RT.DataProduct and adds custom fields specified by dict
        return any_old(RT.DataProduct, dict(temporal_domain=tdom, spatial_domain=sdom))
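    # Hedged sketch of an any_old-style factory (hypothetical; the real test
    # helper fills every required field for the resource type):
    def any_old_sketch(self, resource_type, extra_fields=None):
        fields = dict(name='%s_name' % resource_type, description='dummy')
        fields.update(extra_fields or {})
        return IonObject(resource_type, **fields)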
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)

        self.time_dom, self.spatial_dom = time_series_domain()
Example #20
    def create_data_product_obj(self):

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        # creates an IonObject of RT.DataProduct and adds custom fields specified by dict
        return any_old(RT.DataProduct,
                       dict(temporal_domain=tdom, spatial_domain=sdom))
    def test_dataset_crud(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        tdom, sdom = time_series_domain()
        dataset_id = self.dataset_management.create_dataset(name='ctd_dataset', parameter_dictionary_id=pdict_id, spatial_domain=sdom.dump(), temporal_domain=tdom.dump())

        ds_obj = self.dataset_management.read_dataset(dataset_id)
        self.assertEquals(ds_obj.name, 'ctd_dataset')
        
        ds_obj.name = 'something different'
        self.dataset_management.update_dataset(ds_obj)
        ds_obj2 = self.dataset_management.read_dataset(dataset_id)
        self.assertEquals(ds_obj.name, ds_obj2.name)
    def create_dataset(self, parameter_dict_id=''):
        '''
        Creates a time-series dataset
        '''
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        if not parameter_dict_id:
            parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)

        dataset_id = self.dataset_management.create_dataset('test_dataset_%i'%self.i, parameter_dictionary_id=parameter_dict_id, spatial_domain=sdom, temporal_domain=tdom)
        return dataset_id
 def _make_dataset(self):
     tdom, sdom = time_series_domain()
     sdom = sdom.dump()
     tdom = tdom.dump()
     parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name(
         'ctd_parsed_param_dict', id_only=True)
     dataset_id = self.dataset_management.create_dataset(
         'test_dataset',
         parameter_dictionary_id=parameter_dict_id,
         spatial_domain=sdom,
         temporal_domain=tdom)
     return dataset_id
 def gap_coverage(self, stream_id):
     try:
         old_cov = self._coverages.pop(stream_id)
         dataset_id = self.get_dataset(stream_id)
         tdom, sdom = time_series_domain()
         new_cov = DatasetManagementService._create_simplex_coverage(dataset_id, old_cov.parameter_dictionary, sdom, tdom, old_cov._persistence_layer.inline_data_writes)
         old_cov.close()
         result = new_cov
     except KeyError:
         result = self.get_coverage(stream_id)
     self._coverages[stream_id] = result
     return result
 def make_data_product(self, pdict_name, dp_name, available_fields=None):
     pdict_id = self.dataset_management.read_parameter_dictionary_by_name(pdict_name, id_only=True)
     stream_def_id = self.pubsub_management.create_stream_definition('%s stream_def' % dp_name, parameter_dictionary_id=pdict_id, available_fields=available_fields or None)
     self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
     tdom, sdom = time_series_domain()
     tdom = tdom.dump()
     sdom = sdom.dump()
     dp_obj = DataProduct(name=dp_name)
     dp_obj.temporal_domain = tdom
     dp_obj.spatial_domain = sdom
     data_product_id = self.data_product_management.create_data_product(dp_obj, stream_definition_id=stream_def_id)
     self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
     return data_product_id
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)

        self.time_dom, self.spatial_dom = time_series_domain()
        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
 def gap_coverage(self, stream_id):
     try:
         old_cov = self._coverages.pop(stream_id)
         dataset_id = self.get_dataset(stream_id)
         tdom, sdom = time_series_domain()
         new_cov = DatasetManagementService._create_simplex_coverage(
             dataset_id, old_cov.parameter_dictionary, sdom, tdom,
             old_cov._persistence_layer.inline_data_writes)
         old_cov.close()
         result = new_cov
     except KeyError:
         result = self.get_coverage(stream_id)
     self._coverages[stream_id] = result
     return result
    def create_dataset(self, parameter_dict_id=''):
        '''
        Creates a time-series dataset
        '''
        dataset_management = DatasetManagementServiceClient()
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        if not parameter_dict_id:
            parameter_dict_id = dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)

        dataset_id = dataset_management.create_dataset('test_dataset_', parameter_dictionary_id=parameter_dict_id, spatial_domain=sdom, temporal_domain=tdom)
        self.addCleanup(dataset_management.delete_dataset, dataset_id)
        return dataset_id
    def _set_up_DataProduct_obj(self):
        # Create data product object to be used for each of the platform log streams
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('platform_eng_parsed', id_only=True)
        self.platform_eng_stream_def_id = self.pubsubcli.create_stream_definition(
            name='platform_eng', parameter_dictionary_id=pdict_id)
        self.dp_obj = IonObject(RT.DataProduct,
            name='platform_eng data',
            description='platform_eng test',
            temporal_domain = tdom,
            spatial_domain = sdom)
    def _start_raw_ingestion(self):
        dpsc_cli = DataProductManagementServiceClient()
        rrclient = ResourceRegistryServiceClient()
        RR2 = EnhancedResourceRegistryClient(rrclient)

        # Generic time-series data domain creation
        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom.dump(),
            spatial_domain = sdom.dump())

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------

        log.info("Create data product... raw stream id: %s", self._raw_stream_id)
        dp_id = dpsc_cli.create_data_product_(data_product= dp_obj)
        dataset_id = self.create_dataset(self._raw_stream_pdict_id)
        RR2.assign_stream_definition_to_data_product_with_has_stream_definition(self._raw_stream_def_id, dp_id)
        RR2.assign_stream_to_data_product_with_has_stream(self._raw_stream_id, dp_id)
        RR2.assign_dataset_to_data_product_with_has_dataset(dataset_id, dp_id)
        self._raw_dataset_id = dataset_id

        log.info("Create data product...Complete")

        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        # Assert that the data product has an associated stream def at this stage
        stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        self.assertNotEquals(len(stream_ids), 0)

        log.info("Activate data product persistence")
        dpsc_cli.activate_data_product_persistence(dp_id)

        log.info("Read data product")
        dp_obj = dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Created data product %s', dp_obj)
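    # Hedged aside: the RR2 assign_*_with_has_* calls above are association
    # helpers generated by EnhancedResourceRegistryClient. The equivalent
    # direct registry calls (assuming the usual create_association API) are:
    def _associate_raw_product(self, rrclient, dp_id, dataset_id):
        rrclient.create_association(dp_id, PRED.hasStreamDefinition, self._raw_stream_def_id)
        rrclient.create_association(dp_id, PRED.hasStream, self._raw_stream_id)
        rrclient.create_association(dp_id, PRED.hasDataset, dataset_id)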
    def create_data_product(self, name='', description='', stream_def_id=''):
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = DataProduct(
            name=name,
            description=description,
            processing_level_code='Parsed_Canonical',
            temporal_domain=tdom,
            spatial_domain=sdom)

        data_product_id = self.data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
        self.data_product_management.activate_data_product_persistence(data_product_id)
        return data_product_id
Example #32
    def create_data_product(self, name='', description='', stream_def_id=''):
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = DataProduct(
            name=name,
            description=description,
            processing_level_code='Parsed_Canonical',
            temporal_domain=tdom,
            spatial_domain=sdom)

        data_product_id = self.data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
        return data_product_id
    def _set_up_DataProduct_obj(self):
        # Create data product object to be used for each of the platform log streams
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'platform_eng_parsed', id_only=True)
        self.platform_eng_stream_def_id = self.pubsubcli.create_stream_definition(
            name='platform_eng', parameter_dictionary_id=self.pdict_id)
        self.dp_obj = IonObject(RT.DataProduct,
                                name='platform_eng data',
                                description='platform_eng test',
                                temporal_domain=tdom,
                                spatial_domain=sdom)
Example #34
    def make_dp(self, stream_def_id):
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        stream_def = self.resource_registry.read(stream_def_id)
        dp_obj = DataProduct(
                name=stream_def.name,
                description=stream_def.name,
                processing_level_code='Parsed_Canonical',
                temporal_domain = tdom,
                spatial_domain = sdom)


        data_product_id = self.data_product_management.create_data_product(dp_obj, stream_definition_id=stream_def_id)
        self.data_product_management.activate_data_product_persistence(data_product_id)
        return data_product_id
Example #35
    def make_dp(self, stream_def_id):
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        stream_def = self.resource_registry.read(stream_def_id)
        dp_obj = DataProduct(name=stream_def.name,
                             description=stream_def.name,
                             processing_level_code='Parsed_Canonical',
                             temporal_domain=tdom,
                             spatial_domain=sdom)

        data_product_id = self.data_product_management.create_data_product(
            dp_obj, stream_definition_id=stream_def_id)
        self.data_product_management.activate_data_product_persistence(
            data_product_id)
        return data_product_id
    def test_get_data_from_FDW(self):
        # generate a data product and check that the FDW can get data
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='example')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt, 100)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.event.wait(10))

        gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity

        print "--------------------------------"
        print dataset_id
        coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
        print coverage_path
        print "--------------------------------"

        #verify table exists in the DB (similar to above)
        # ....code...

        # check that the geoserver layer exists as above
        # ... code ....

        # make a WMS/WFS request... something like this (or both)
        url = self.gs_host+'/geoserver/geonode/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=geonode:ooi_' + dataset_id + '_ooi&maxFeatures=1&outputFormat=csv'
        r = requests.get(url)
        self.assertTrue(r.status_code == 200)
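        # Hedged sketch (not in the original test): the comment above suggests a
        # WMS request as well; assuming the same geoserver workspace and layer
        # naming as the WFS request:
        wms_url = self.gs_host + '/geoserver/geonode/wms?service=WMS&version=1.1.0&request=GetCapabilities'
        r = requests.get(wms_url)
        self.assertTrue(r.status_code == 200)
        self.assertTrue(('ooi_' + dataset_id + '_ooi') in r.content)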
Example #37
    def create_ctd_input_stream_and_data_product(self, data_product_name='ctd_parsed'):

        cc = self.container
        assertions = self.assertTrue

        #-------------------------------
        # Create CTD Parsed as the initial data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_pdict_id = self.datasetclient.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=ctd_pdict_id)


        log.debug('Creating new CDM data product with a stream definition')

        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
            name=data_product_name,
            description='ctd stream test',
            temporal_domain = tdom.dump(),
            spatial_domain = sdom.dump())

        ctd_parsed_data_product_id = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)
        self.addCleanup(self.dataproductclient.delete_data_product, ctd_parsed_data_product_id)

        log.debug('new ctd_parsed_data_product_id = %s' % ctd_parsed_data_product_id)

        #Only ever need one device for testing purposes.
        instDevice_obj,_ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='SBE37IMDevice')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product_id)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product_id)
        self.addCleanup(self.dataproductclient.suspend_data_product_persistence, ctd_parsed_data_product_id)

        # Retrieve the id of the OUTPUT stream from the output Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        ctd_stream_id = stream_ids[0]

        return ctd_stream_id, ctd_parsed_data_product_id
Example #38
    def create_data_product(self, name, stream_def_id="", param_dict_name="", pdict_id=""):
        if not (stream_def_id or param_dict_name or pdict_id):
            raise AssertionError("Attempted to create a Data Product without a parameter dictionary")

        tdom, sdom = time_series_domain()

        dp = DataProduct(name=name, spatial_domain=sdom.dump(), temporal_domain=tdom.dump())

        stream_def_id = stream_def_id or self.create_stream_definition(
            "%s stream def" % name,
            parameter_dictionary_id=pdict_id
            or self.RR2.find_resource_by_name(RT.ParameterDictionary, param_dict_name, id_only=True),
        )

        data_product_id = self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        return data_product_id
Example #39
    def create_transform_process(self, data_process_definition_id, data_process_input_dp_id, stream_name):

        data_process_definition = self.rrclient.read(data_process_definition_id)

        # Find the link between the output Stream Definition resource and the Data Process Definition resource
        stream_ids,_ = self.rrclient.find_objects(data_process_definition._id, PRED.hasStreamDefinition, RT.StreamDefinition,  id_only=True)
        if not stream_ids:
            raise Inconsistent("The data process definition %s is missing an association to an output stream definition" % data_process_definition._id )
        process_output_stream_def_id = stream_ids[0]

        # Use the data process definition's name for the output data product
        data_process_name = data_process_definition.name

        # Create the output data product of the transform

        tdom, sdom = time_series_domain()

        transform_dp_obj = IonObject(RT.DataProduct,
            name=data_process_name,
            description=data_process_definition.description,
            temporal_domain = tdom.dump(),
            spatial_domain = sdom.dump())

        transform_dp_id = self.dataproductclient.create_data_product(transform_dp_obj, process_output_stream_def_id)

        self.dataproductclient.activate_data_product_persistence(data_product_id=transform_dp_id)

        # The transform's output data product id
        output_data_product_id = transform_dp_id

        # Create the  transform data process
        log.debug("create data_process and start it")
        data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition_id = data_process_definition._id,
            in_data_product_ids = [data_process_input_dp_id],
            out_data_product_ids = [transform_dp_id])

        self.dataprocessclient.activate_data_process(data_process_id)


        #Find the id of the output data stream
        stream_ids, _ = self.rrclient.find_objects(transform_dp_id, PRED.hasStream, None, True)
        if not stream_ids:
            raise Inconsistent("The data process %s is missing an association to an output stream" % data_process_id )

        return data_process_id, output_data_product_id
    def create_dataset(self, parameter_dict_id=''):
        '''
        Creates a time-series dataset
        '''
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        if not parameter_dict_id:
            parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name(
                'ctd_parsed_param_dict', id_only=True)

        dataset_id = self.dataset_management.create_dataset(
            'test_dataset_%i' % self.i,
            parameter_dictionary_id=parameter_dict_id,
            spatial_domain=sdom,
            temporal_domain=tdom)
        return dataset_id
Example #41
    def create_data_product(self,name, stream_def_id='', param_dict_name='', pdict_id=''):
        if not (stream_def_id or param_dict_name or pdict_id):
            raise AssertionError('Attempted to create a Data Product without a parameter dictionary')

        tdom, sdom = time_series_domain()

        dp = DataProduct(name=name,
                spatial_domain = sdom.dump(),
                temporal_domain = tdom.dump(),
                )

        stream_def_id = stream_def_id or self.create_stream_definition('%s stream def' % name, 
                parameter_dictionary_id=pdict_id or self.RR2.find_resource_by_name(RT.ParameterDictionary,
                    param_dict_name, id_only=True))

        data_product_id = self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        return data_product_id
    def test_create_dataset_verify_geoserver_layer(self):
        #generate layer and check that the service created it in geoserver
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='example')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt, 100)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.event.wait(10))

        gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity

        log.debug("--------------------------------")
        log.debug(dataset_id)
        coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
        log.debug(coverage_path)
        log.debug("--------------------------------")

        # verify that the layer exists in geoserver
        try:
            r = requests.get(self.gs_rest_url + '/layers/ooi_' + dataset_id + '_ooi.xml', auth=(self.username, self.PASSWORD))
            self.assertTrue(r.status_code == 200)
        except Exception as e:
            log.error("check service and layer exist...%s", e)
            self.fail("geoserver layer check failed: %s" % e)
Example #43
    def test_dataset_crud(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        tdom, sdom = time_series_domain()
        dataset_id = self.dataset_management.create_dataset(
            name='ctd_dataset',
            parameter_dictionary_id=pdict_id,
            spatial_domain=sdom.dump(),
            temporal_domain=tdom.dump())

        ds_obj = self.dataset_management.read_dataset(dataset_id)
        self.assertEquals(ds_obj.name, 'ctd_dataset')

        ds_obj.name = 'something different'
        self.dataset_management.update_dataset(ds_obj)
        self.dataset_management.register_dataset(dataset_id)
        ds_obj2 = self.dataset_management.read_dataset(dataset_id)
        self.assertEquals(ds_obj.name, ds_obj2.name)
        self.assertTrue(ds_obj2.registered)
    def create_dataset(self, parameter_dict_id=""):
        """
        Creates a time-series dataset
        """
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        if not parameter_dict_id:
            parameter_dict_id = self.dataset_management.read_parameter_dictionary_by_name(
                "ctd_parsed_param_dict", id_only=True
            )

        dataset_id = self.dataset_management.create_dataset(
            "test_dataset_%i" % self.i,
            parameter_dictionary_id=parameter_dict_id,
            spatial_domain=sdom,
            temporal_domain=tdom,
        )
        return dataset_id
    def test_data_product_stream_def(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)
        dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj,
            stream_definition_id=ctd_stream_def_id)

        stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
        self.assertEquals(ctd_stream_def_id, stream_def_id)
Example #46
    def load_data_product(self):
        dset_i = 0
        dataset_management = DatasetManagementServiceClient()
        pubsub_management = PubsubManagementServiceClient()
        data_product_management = DataProductManagementServiceClient()
        resource_registry = self.container.instance.resource_registry

        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = DataProduct(name='instrument_data_product_%i' % dset_i,
                             description='ctd stream test',
                             processing_level_code='Parsed_Canonical',
                             temporal_domain=tdom,
                             spatial_domain=sdom)
        pdict_id = dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        stream_def_id = pubsub_management.create_stream_definition(
            name='parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(pubsub_management.delete_stream_definition,
                        stream_def_id)
        data_product_id = data_product_management.create_data_product(
            data_product=dp_obj, stream_definition_id=stream_def_id)
        self.addCleanup(data_product_management.delete_data_product,
                        data_product_id)
        data_product_management.activate_data_product_persistence(
            data_product_id)
        self.addCleanup(
            data_product_management.suspend_data_product_persistence,
            data_product_id)

        stream_ids, assocs = resource_registry.find_objects(
            subject=data_product_id, predicate='hasStream', id_only=True)
        stream_id = stream_ids[0]
        route = pubsub_management.read_stream_route(stream_id)

        dataset_ids, assocs = resource_registry.find_objects(
            subject=data_product_id, predicate='hasDataset', id_only=True)
        dataset_id = dataset_ids[0]

        return data_product_id, stream_id, route, stream_def_id, dataset_id
Example #47
    def test_data_product_stream_def(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()



        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)
        dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj,
            stream_definition_id=ctd_stream_def_id)

        stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
        self.assertEquals(ctd_stream_def_id, stream_def_id)
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.dataset_management_client = DatasetManagementServiceClient(
            node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(
            node=self.container.node)

        self.time_dom, self.spatial_dom = time_series_domain()

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(
            name='ctd_parsed_param_dict', id_only=True)

        self.stream_def_id = self.pubsub_client.create_stream_definition(
            name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition,
                        self.stream_def_id)

        self.stream_id, self.route_id = self.pubsub_client.create_stream(
            name='parsed_stream',
            stream_definition_id=self.stream_def_id,
            exchange_point='science_data')
        self.addCleanup(self.pubsub_client.delete_stream, self.stream_id)

        self.subscription_id = self.pubsub_client.create_subscription(
            name='parsed_subscription',
            stream_ids=[self.stream_id],
            exchange_name='parsed_subscription')
        self.addCleanup(self.pubsub_client.delete_subscription,
                        self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription,
                        self.subscription_id)

        self.publisher = StandaloneStreamPublisher(self.stream_id,
                                                   self.route_id)
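        # Tests built on this setUp can publish granules through
        # self.publisher; the activated subscription above routes them to the
        # 'parsed_subscription' exchange.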
Example #49
    def test_ctdbp_L0_all(self):
        """
        Test packets processed by the ctdbp_L0_all transform
        """

        #----------- Data Process Definition --------------------------------

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='CTDBP_L0_all',
            description=
            'Take parsed stream and put the C, T and P into three separate L0 streams.',
            module='ion.processes.data.transforms.ctdbp.ctdbp_L0',
            class_name='CTDBP_L0_all')

        dprocdef_id = self.data_process_management.create_data_process_definition(
            dpd_obj)
        self.addCleanup(
            self.data_process_management.delete_data_process_definition,
            dprocdef_id)

        log.debug("created data process definition: id = %s", dprocdef_id)

        #----------- Data Products --------------------------------

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        input_param_dict = self._create_input_param_dict_for_test(
            parameter_dict_name='fictitious_ctdp_param_dict')

        # Get the stream definition for the stream using the parameter dictionary
        #        input_param_dict = self.dataset_management.read_parameter_dictionary_by_name('ctdbp_cdef_sample', id_only=True)
        input_stream_def_id = self.pubsub.create_stream_definition(
            name='parsed', parameter_dictionary_id=input_param_dict)
        self.addCleanup(self.pubsub.delete_stream_definition,
                        input_stream_def_id)

        log.debug("Got the parsed parameter dictionary: id: %s",
                  input_param_dict)
        log.debug("Got the stream def for parsed input: %s",
                  input_stream_def_dict)

        # Input data product
        parsed_stream_dp_obj = IonObject(
            RT.DataProduct,
            name='parsed_stream',
            description='Parsed stream input to CTBP L0 transform',
            temporal_domain=tdom,
            spatial_domain=sdom)

        input_dp_id = self.dataproduct_management.create_data_product(
            data_product=parsed_stream_dp_obj,
            stream_definition_id=input_stream_def_id)
        self.addCleanup(self.dataproduct_management.delete_data_product,
                        input_dp_id)

        # output data product
        L0_stream_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_stream',
            description='L0_stream output of CTBP L0 transform',
            temporal_domain=tdom,
            spatial_domain=sdom)

        L0_stream_dp_id = self.dataproduct_management.create_data_product(
            data_product=L0_stream_dp_obj,
            stream_definition_id=input_stream_def_id)
        self.addCleanup(self.dataproduct_management.delete_data_product,
                        L0_stream_dp_id)

        # The key name here needs to be "L0_stream": when the data process is
        # launched, it becomes the key under config.process.publish_streams in
        # the process configuration.
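        # A sketch of the assumed config shape the launched process would see:
        #   config.process.publish_streams = {'L0_stream': <output stream id>}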
        out_stream_ids, _ = self.resource_registry.find_objects(
            L0_stream_dp_id, PRED.hasStream, RT.Stream, True)
        self.assertTrue(len(out_stream_ids))
        output_stream_id = out_stream_ids[0]

        dproc_id = self.data_process_management.create_data_process(
            data_process_definition_id=dprocdef_id,
            in_data_product_ids=[input_dp_id],
            out_data_product_ids=[L0_stream_dp_id],
            configuration=None)

        self.addCleanup(self.data_process_management.delete_data_process,
                        dproc_id)

        log.debug("Created a data process for ctdbp_L0. id: %s", dproc_id)

        # Activate the data process
        self.data_process_management.activate_data_process(dproc_id)
        self.addCleanup(self.data_process_management.deactivate_data_process,
                        dproc_id)

        #----------- Find the stream that create_data_product() associated with the input data product -----------

        stream_ids, _ = self.resource_registry.find_objects(
            input_dp_id, PRED.hasStream, RT.Stream, True)
        self.assertTrue(len(stream_ids))

        input_stream_id = stream_ids[0]
        stream_route = self.pubsub.read_stream_route(input_stream_id)

        log.debug("The input stream for the L0 transform: %s", input_stream_id)

        #----------- Create a subscriber that will listen to the transform's output --------------------------------

        ar = gevent.event.AsyncResult()
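        # AsyncResult is a one-shot gevent primitive: the subscriber callback
        # below sets it, and ar.get(timeout=...) blocks until that happens.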

        def subscriber(m, r, s):
            ar.set(m)

        sub = StandaloneStreamSubscriber(exchange_name='sub',
                                         callback=subscriber)

        sub_id = self.pubsub.create_subscription('subscriber_to_transform',
                                                 stream_ids=[output_stream_id],
                                                 exchange_name='sub')
        self.addCleanup(self.pubsub.delete_subscription, sub_id)

        self.pubsub.activate_subscription(sub_id)
        self.addCleanup(self.pubsub.deactivate_subscription, sub_id)

        sub.start()
        self.addCleanup(sub.stop)

        #----------- Publish on that stream so that the transform can receive it --------------------------------

        pub = StandaloneStreamPublisher(input_stream_id, stream_route)
        publish_granule = self._get_new_ctd_packet(
            stream_definition_id=input_stream_def_id, length=5)

        pub.publish(publish_granule)

        log.debug("Published the following granule: %s", publish_granule)

        granule_from_transform = ar.get(timeout=20)

        log.debug("Got the following granule from the transform: %s",
                  granule_from_transform)

        # Check that the granule published by the L0 transform has the right properties
        self._check_granule_from_transform(granule_from_transform)
Example #50
    def base_activate_deployment(self):

        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------

        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='PlatformSite1',
                                      description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(
            platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(
            platform_device_obj)

        platform_model_obj = IonObject(RT.PlatformModel,
                                       name='PlatformModel1',
                                       description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(
            platform_model_obj)

        #-------------------------------------------------------------------------------------
        # Create instrument site
        #-------------------------------------------------------------------------------------

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(
            instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(
            name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='Log Data Product',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        out_log_data_product_id = self.dmpsclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        #----------------------------------------------------------------------------------------------------
        # Start the transform (a logical transform) that acts as an instrument site
        #----------------------------------------------------------------------------------------------------

        self.omsclient.create_site_data_product(
            site_id=instrument_site_id,
            data_product_id=out_log_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument device
        #----------------------------------------------------------------------------------------------------

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(
            instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice,
                                         instrument_device_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='Instrument Data Product',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        inst_data_product_id = self.dmpsclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        #assign data products appropriately
        self.damsclient.assign_data_product(
            input_resource_id=instrument_device_id,
            data_product_id=inst_data_product_id)
        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        #----------------------------------------------------------------------------------------------------

        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(
            instrument_model_obj)

        #----------------------------------------------------------------------------------------------------
        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)
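        # DotDict allows attribute-style access on the result, so callers can
        # read these ids back as e.g. ret.deployment_id.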

        return ret
Example #51
    def _prepare_things_you_need_to_launch_transform(self, name_of_transform=''):

        module, class_name = self._get_class_module(name_of_transform)

        #-------------------------------------------------------------------------
        # Data Process Definition
        #-------------------------------------------------------------------------

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='CTDBP_%s_Transform' % name_of_transform,
            description='Data Process Definition for the CTDBP %s transform.' % name_of_transform,
            module=module,
            class_name=class_name)

        data_proc_def_id = self.data_process_management.create_data_process_definition(dpd_obj)
        self.addCleanup(self.data_process_management.delete_data_process_definition, data_proc_def_id)
        log.debug("created data process definition: id = %s", data_proc_def_id)

        #-------------------------------------------------------------------------
        # Construct temporal and spatial Coordinate Reference System objects for the data product objects
        #-------------------------------------------------------------------------

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        #-------------------------------------------------------------------------
        # Get the names of the input and output data products
        #-------------------------------------------------------------------------

        input_dpod_id = ''
        output_dpod_id = ''
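        # The transforms chain parsed -> L0 -> L1 -> L2 (density / salinity),
        # so each branch below records its output product as the next stage's
        # input.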

        if name_of_transform == 'L0':

            input_dpod_id = self._create_input_data_product('parsed', tdom, sdom)
            output_dpod_id = self._create_output_data_product('L0', tdom, sdom)

            self.in_prod_for_L1 = output_dpod_id

        elif name_of_transform == 'L1':

            input_dpod_id = self.in_prod_for_L1
            output_dpod_id = self._create_output_data_product('L1', tdom, sdom)

            self.in_prod_for_L2 = output_dpod_id

        elif name_of_transform == 'L2_density':

            input_dpod_id = self.in_prod_for_L2
            output_dpod_id = self._create_output_data_product('L2_density', tdom, sdom)

        elif name_of_transform == 'L2_salinity':

            input_dpod_id = self.in_prod_for_L2
            output_dpod_id = self._create_output_data_product('L2_salinity', tdom, sdom)

        else:
            self.fail("something bad happened")

        return [data_proc_def_id, input_dpod_id, output_dpod_id]
    def test_get_provenance(self):

        #create a deployment with metadata and an initial site and device
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, "")
        log.debug( 'test_get_provenance: new instrument_site_id = %s ', str(instrument_site_id))


        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel" )

        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" %ex)
        log.debug( 'test_get_provenance: new InstrumentModel id = %s ', str(instModel_id))

        self.omsclient.assign_instrument_model_to_instrument_site(instModel_id, instrument_site_id)


        # Create InstrumentAgent
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict', records_per_granule=2, granule_publish_rate=5 )
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                name='agent007',
                                description="SBE37IMAgent",
                                driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg",
                                stream_configurations = [parsed_config] )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" %ex)
        log.debug( 'test_get_provenance: new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" %ex)

        log.debug("test_get_provenance: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)


        #-------------------------------
        # Create CTD Parsed  data product
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)

        log.debug( 'test_get_provenance: Creating new CDM data product with a stream definition')


        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dpmsclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', ctd_parsed_data_product)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)
        self.dpmsclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        #-------------------------------
        # create a data product for the site to pass the OMS check.... we need to remove this check
        #-------------------------------
        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        log_data_product_id = self.dpmsclient.create_data_product(dp_obj, parsed_stream_def_id)
        self.omsclient.create_site_data_product(instrument_site_id, log_data_product_id)


        #-------------------------------
        # Deploy instrument device to instrument site
        #-------------------------------
        deployment_obj = IonObject(RT.Deployment,
                                        name='TestDeployment',
                                        description='some new deployment')
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id)
        self.imsclient.deploy_instrument_device(instDevice_id, deployment_id)
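        # Site and device are both attached to the deployment first;
        # activate_deployment (below) then pairs the deployed device with the
        # site.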

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        self.omsclient.activate_deployment(deployment_id)
        inst_device_objs, _ = self.rrclient.find_objects(subject=instrument_site_id, predicate=PRED.hasDevice, object_type=RT.InstrumentDevice, id_only=False)
        log.debug("test_get_provenance: deployed device: %s ", str(inst_device_objs[0]) )

        #-------------------------------
        # Create the agent instance
        #-------------------------------

        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }


        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
            description="SBE37IMAgentInstance",
            port_agent_config = port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" %ex)


        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1ConductivityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_conductivity',
                            description='create the L1 conductivity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
                            class_name='CTDL1ConductivityTransform')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" %ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1PressureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_pressure',
                            description='create the L1 pressure data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
                            class_name='CTDL1PressureTransform')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1PressureTransform data process definition: %s" %ex)


        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition CTDL1TemperatureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_temperature',
                            description='create the L1 temperature data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
                            class_name='CTDL1TemperatureTransform')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition SalinityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_salinity',
                            description='create the L2 salinity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
                            class_name='SalinityTransform')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new SalinityTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug("TestDataProductProvenance: create data process definition DensityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_density',
                            description='create the L2 density data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_density',
                            class_name='DensityTransform')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new DensityTransform data process definition: %s" %ex)


        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}
        log.debug("TestDataProductProvenance: create output data product L0 conductivity")

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)

        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id


        log.debug("TestDataProductProvenance: create output data product L0 pressure")

        ctd_l0_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L0_Pressure',
                                                    description='transform output pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                            outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        log.debug("TestDataProductProvenance: create output data product L0 temperature")

        ctd_l0_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L0_Temperature',
                                                        description='transform output temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(name='L1_conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id, binding='conductivity' )

        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(name='L1_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id, binding='pressure' )

        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(name='L1_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id, binding='temperature' )

        log.debug("TestDataProductProvenance: create output data product L1 conductivity")

        ctd_l1_conductivity_output_dp_obj = IonObject(RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_conductivity_output_dp_obj,
                                                                                outgoing_stream_l1_conductivity_id)


        log.debug("TestDataProductProvenance: create output data product L1 pressure")

        ctd_l1_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L1_Pressure',
                                                    description='transform output L1 pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l1_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_pressure_output_dp_obj,
                                                                            outgoing_stream_l1_pressure_id)


        log.debug("TestDataProductProvenance: create output data product L1 temperature")

        ctd_l1_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L1_Temperature',
                                                        description='transform output L1 temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_temperature_output_dp_obj,
                                                                                outgoing_stream_l1_temperature_id)

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(name='L2_salinity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id, binding='salinity' )

        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(name='L2_Density', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id, binding='density' )

        log.debug("TestDataProductProvenance: create output data product L2 Salinity")

        ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct,
                                                    name='L2_Salinity',
                                                    description='transform output L2 salinity',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)


        ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_salinity_output_dp_obj,
                                                                            outgoing_stream_l2_salinity_id)


        log.debug("TestDataProductProvenance: create output data product L2 Density")

#        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
#                                                    name='L2_Density',
#                                                    description='transform output pressure',
#                                                    temporal_domain = tdom,
#                                                    spatial_domain = sdom)
#
#        ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj,
#                                                                            outgoing_stream_l2_density_id,
#                                                                            parameter_dictionary)

        contactInfo = ContactInformation()
        contactInfo.individual_names_given = "Bill"
        contactInfo.individual_name_family = "Smith"
        contactInfo.street_address = "111 First St"
        contactInfo.city = "San Diego"
        contactInfo.email = "*****@*****.**"
        contactInfo.phones = ["858-555-6666"]
        contactInfo.country = "USA"
        contactInfo.postal_code = "92123"

        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
                                                    name='L2_Density',
                                                    description='transform output L2 density',
                                                    contacts = [contactInfo],
                                                    iso_topic_category = "my_iso_topic_category_here",
                                                    quality_control_level = "1",
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj,
                                                                            outgoing_stream_l2_density_id)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L0 all data_process start")
        try:
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
            #activate only this data process just for coverage
            self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        contents = "this is the lookup table  contents, replace with a file..."
        att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII)
        deviceAttachment = self.rrclient.create_attachment(ctd_l0_all_data_process_id, att)
        log.info( 'TestDataProductProvenance: data process attachment id = %s', deviceAttachment)

        log.debug("TestDataProductProvenance: create L0 all data_process return")


        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1 Conductivity data_process start")
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_conductivity_dprocdef_id, [ctd_l0_conductivity_output_dp_id], {'conductivity':ctd_l1_conductivity_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_pressure_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_pressure_dprocdef_id, [ctd_l0_pressure_output_dp_id], {'pressure':ctd_l1_pressure_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_pressure_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L1_Pressure data_process start")
        try:
            l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_temperature_dprocdef_id, [ctd_l0_temperature_output_dp_id], {'temperature':ctd_l1_temperature_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        #-------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L2_salinity data_process start")
        try:
            l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_salinity_dprocdef_id, [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id], {'salinity':ctd_l2_salinity_output_dp_id})
            self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # L2 Density: Create the data process
        #-------------------------------
        log.debug("TestDataProductProvenance: create L2_Density data_process start")
        try:
            l2_density_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_density_dprocdef_id, [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id], {'density':ctd_l2_density_output_dp_id})
            self.dataprocessclient.activate_data_process(l2_density_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)


        #-------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        print 'TestDataProductProvenance: Instrument agent instance obj = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
#        self._ia_client = ResourceAgentClient('iaclient', name=inst_agent_instance_obj.agent_process_id,  process=FakeProcess())
#        print 'activate_instrument: got ia client %s', self._ia_client
#        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))


        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        self.dataprocessclient.deactivate_data_process(l2_density_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_temperature_all_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_pressure_data_process_id)
        self.dataprocessclient.deactivate_data_process(l1_conductivity_data_process_id)
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)

        #-------------------------------
        # Retrieve the provenance info for the ctd density data product
        #-------------------------------
        provenance_dict = self.dpmsclient.get_data_product_provenance(ctd_l2_density_output_dp_id)
        log.debug("TestDataProductProvenance: provenance_dict  %s", str(provenance_dict))

        #validate that products are represented
        self.assertTrue (provenance_dict[str(ctd_l1_conductivity_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l0_conductivity_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l2_density_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l1_temperature_output_dp_id)])
        self.assertTrue (provenance_dict[str(ctd_l0_temperature_output_dp_id)])

        density_dict = provenance_dict[str(ctd_l2_density_output_dp_id)]
        self.assertEquals(density_dict['producer'], [l2_density_all_data_process_id])
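        # The provenance dict appears to be keyed by data product id, with each
        # entry carrying (at least) a 'producer' list of the data process ids
        # that generated that product, as the assertions above exercise.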


        #-------------------------------
        # Retrieve the extended resource for this data product
        #-------------------------------
        extended_product = self.dpmsclient.get_data_product_extension(ctd_l2_density_output_dp_id)
        self.assertEqual(1, len(extended_product.data_processes) )
        self.assertEqual(3, len(extended_product.process_input_data_products) )
#        log.debug("TestDataProductProvenance: DataProduct provenance_product_list  %s", str(extended_product.provenance_product_list))
#        log.debug("TestDataProductProvenance: DataProduct data_processes  %s", str(extended_product.data_processes))
#        log.debug("TestDataProductProvenance: DataProduct process_input_data_products  %s", str(extended_product.process_input_data_products))
#        log.debug("TestDataProductProvenance: provenance  %s", str(extended_product.computed.provenance.value))

        #-------------------------------
        # Retrieve the extended resource for this data process
        #-------------------------------
        extended_process_def = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)

#        log.debug("TestDataProductProvenance: DataProcess extended_process_def  %s", str(extended_process_def))
#        log.debug("TestDataProductProvenance: DataProcess data_processes  %s", str(extended_process_def.data_processes))
#        log.debug("TestDataProductProvenance: DataProcess data_products  %s", str(extended_process_def.data_products))
        self.assertEqual(1, len(extended_process_def.data_processes) )
        self.assertEqual(3, len(extended_process_def.output_stream_definitions) )
        self.assertEqual(3, len(extended_process_def.data_products) ) #one list because of one data process

        #-------------------------------
        # Request the xml report
        #-------------------------------
        results = self.dpmsclient.get_data_product_provenance_report(ctd_l2_density_output_dp_id)


        #-------------------------------
        # Cleanup
        #-------------------------------

        self.dpmsclient.delete_data_product(ctd_parsed_data_product)
        self.dpmsclient.delete_data_product(log_data_product_id)
        self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)
Example #53
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS'

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(name='example eda', handler_module=self.DVR_CONFIG['dvr_mod'],
            handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance(name='example eda instance')
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst,
            external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(name='example data provider', institution=Institution(),
            contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(name='example datasource', protocol_type='DAP', institution=Institution(),
            contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'usgs_test_dataset'
        dset = ExternalDataset(name=ds_name,
            dataset_description=DatasetDescription(),
            update_description=UpdateDescription(),
            contact=ContactInformation())

        # The usgs.nc test dataset is a download of the R1 dataset found here:
        # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
        dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
        dset.dataset_description.parameters['temporal_dimension'] = 'time'
        dset.dataset_description.parameters['zonal_dimension'] = 'lon'
        dset.dataset_description.parameters['meridional_dimension'] = 'lat'
        dset.dataset_description.parameters['vertical_dimension'] = 'z'
        dset.dataset_description.parameters['variables'] = [
            'water_temperature',
            'streamflow',
            'water_temperature_bottom',
            'water_temperature_middle',
            'specific_conductance',
            'data_qualifier', ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='dap_model')
        #dsrc_model.model = 'DAP'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)
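        # Note: register_external_data_set sets up the data producer, while the
        # assign_* calls above create the individual associations one by one.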

        #build the parameter contexts for the temp streamdef created below
        pc_list = []

        #Get 'time' parameter context
        pc_list.append(dms_cli.read_parameter_context_by_name('time', id_only=True))

        for pc_k, pc in self._create_parameter_dictionary().iteritems():
            pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('netcdf_param_dict', pc_list)

        #create temp streamdef so the data product can create the stream
        streamdef_id = pubsub_cli.create_stream_definition(name="netcdf", description="netcdf", parameter_dictionary_id=pdict_id)

        tdom, sdom = time_series_domain()
        tdom, sdom = tdom.dump(), sdom.dump()

        dprod = IonObject(RT.DataProduct,
            name='usgs_parsed_product',
            description='parsed usgs product',
            temporal_domain=tdom,
            spatial_domain=sdom)

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
            stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_ids, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_ids[0]

        log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='usgs', stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': streamdef_id,
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 1,
            }
Example #54
    def test_activateInstrumentSample(self):

        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        raw_config = StreamConfiguration(stream_name='raw',
                                         parameter_dictionary_name='raw')
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict')

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=DRV_URI_GOOD,
            stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) '
        )
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        instDevice_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, instDevice_id)
        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config,
                                          alerts=[])

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubcli.create_stream_definition(
            name='parsed', parameter_dictionary_id=parsed_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'raw', id_only=True)
        raw_stream_def_id = self.pubsubcli.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug('new dp_id = %s', data_product_id1)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]

        pid = self.create_logger('ctd_parsed', stream_ids[0])
        self.loggerpids.append(pid)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', data_product_id2)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id2)

        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2)

        # setup notifications for the device and parsed data product
        user_id_1 = self._create_notification(user_name='user_1',
                                              instrument_id=instDevice_id,
                                              product_id=data_product_id1)
        #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users ---------
        user_id_2 = self._create_notification(user_name='user_2',
                                              instrument_id=instDevice_id,
                                              product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                   PRED.hasStream, None, True)
        log.debug('Data product streams2 = %s', str(stream_ids))

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id2 = %s', dataset_ids[0])
        self.raw_dataset = dataset_ids[0]

        #elastic search debug
        es_indexes, _ = self.container.resource_registry.find_resources(
            restype='ElasticSearchIndex')
        log.debug('ElasticSearch indexes: %s', [i.name for i in es_indexes])
        log.debug('Bootstrap %s', CFG.bootstrap.use_es)

        def start_instrument_agent():
            self.imsclient.start_instrument_agent_instance(
                instrument_agent_instance_id=instAgentInstance_id)

        gevent.joinall([gevent.spawn(start_instrument_agent)])
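        # (joinall blocks until the spawned greenlet running
        # start_instrument_agent_instance has returned)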

        #cleanup
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     instDevice_id, ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            gate.process_id)

        #log.trace('Instrument agent instance obj: = %s' , str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(instDevice_id,
                                              to_name=gate.process_id,
                                              process=FakeProcess())

        log.debug("test_activateInstrumentSample: got ia client %s",
                  str(self._ia_client))

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: initialize %s", str(retval))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.INACTIVE, state)

        log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrument: return value from go_active %s",
                  str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.IDLE, state)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug(
            "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
            str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: run %s", str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.STOPPED, state)

        cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.IDLE, state)

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE)
        for i in xrange(10):
            retval = self._ia_client.execute_resource(cmd)
            log.debug("test_activateInstrumentSample: return from sample %s",
                      str(retval))

        log.debug("test_activateInstrumentSample: calling reset ")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset %s",
                  str(reply))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC Call to start_retrieve
        #--------------------------------------------------------------------------------

        replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset)
        self.assertIsInstance(replay_data_raw, Granule)
        rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw)
        log.debug("RDT raw: %s", str(rdt_raw.pretty_print()))

        self.assertIn('raw', rdt_raw)
        raw_vals = rdt_raw['raw']

        all_raw = "".join(raw_vals)

        # look for 't' entered after a prompt -- ">t"
        t_commands = all_raw.count(">t")

        if 10 != t_commands:
            log.error("%s raw_vals: ", len(raw_vals))
            for i, r in enumerate(raw_vals):
                log.error("raw val %s: %s", i, [r])
            self.fail("Expected 10 't' strings in raw_vals, got %s" %
                      t_commands)
        else:
            log.debug("%s raw_vals: ", len(raw_vals))
            for i, r in enumerate(raw_vals):
                log.debug("raw val %s: %s", i, [r])

        replay_data_parsed = self.dataretrieverclient.retrieve(
            self.parsed_dataset)
        self.assertIsInstance(replay_data_parsed, Granule)
        rdt_parsed = RecordDictionaryTool.load_from_granule(replay_data_parsed)
        log.debug("test_activateInstrumentSample: RDT parsed: %s",
                  str(rdt_parsed.pretty_print()))
        self.assertIn('temp', rdt_parsed)
        temp_vals = rdt_parsed['temp']
        pressure_vals = rdt_parsed['pressure']
        if 10 != len(temp_vals):
            log.error("%s temp_vals: %s", len(temp_vals), temp_vals)
            self.fail("Expected 10 temp_vals, got %s" % len(temp_vals))

        log.debug("l4-ci-sa-rq-138")
        """
        Physical resource control shall be subject to policy

        Instrument management control capabilities shall be subject to policy

        The actor accessing the control capabilities must be authorized to send commands.

        note from maurice 2012-05-18: Talk to tim M to verify that this is policy.  If it is then talk with Stephen to
                                      get an example of a policy test and use that to create a test stub that will be
                                      completed when we have instrument policies.

        Tim M: The "actor", aka observatory operator, will access the instrument through ION.

        """

        #--------------------------------------------------------------------------------
        # Get the extended data product to see if it contains the granules
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(
            data_product_id=data_product_id1, user_id=user_id_1)

        def poller(extended_product):
            return len(extended_product.computed.user_notification_requests.
                       value) == 1

        poll(poller, extended_product, timeout=30)
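        # Hedged sketch (assumption, not the project's helper): poll() is used
        # here as "retry the predicate until it returns True or the timeout
        # elapses". A minimal version might look like this:
        def _poll_sketch(predicate, *args, **kwargs):
            import time
            timeout = kwargs.pop('timeout', 30)
            deadline = time.time() + timeout
            while time.time() < deadline:
                if predicate(*args, **kwargs):
                    return True
                time.sleep(1)  # back off briefly between attempts
            return False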

        self._check_computed_attributes_of_extended_product(
            expected_data_product_id=data_product_id1,
            extended_data_product=extended_product)

        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_1)

        #--------------------------------------------------------------------------------
        # For the second user, check the extended data product and the extended instrument
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(
            data_product_id=data_product_id2, user_id=user_id_2)
        self._check_computed_attributes_of_extended_product(
            expected_data_product_id=data_product_id2,
            extended_data_product=extended_product)

        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_2)
        self._check_computed_attributes_of_extended_instrument(
            expected_instrument_device_id=instDevice_id,
            extended_instrument=extended_instrument)

        #--------------------------------------------------------------------------------
        # Deactivate loggers
        #--------------------------------------------------------------------------------

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

        self.dpclient.delete_data_product(data_product_id1)
        self.dpclient.delete_data_product(data_product_id2)
Example #55
    def test_activateInstrumentSample(self):

        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        #Create stream alarms
        """
        test_two_sided_interval
        Test interval alarm and alarm event publishing for a closed
        interval.
        """

        #        kwargs = {
        #            'name' : 'test_sim_warning',
        #            'stream_name' : 'parsed',
        #            'value_id' : 'temp',
        #            'message' : 'Temperature is above test range of 5.0.',
        #            'type' : StreamAlarmType.WARNING,
        #            'upper_bound' : 5.0,
        #            'upper_rel_op' : '<'
        #        }

        kwargs = {
            'name': 'temperature_warning_interval',
            'stream_name': 'parsed',
            'value_id': 'temp',
            'message':
            'Temperature is below the normal range of 50.0 and above.',
            'type': StreamAlarmType.WARNING,
            'lower_bound': 50.0,
            'lower_rel_op': '<'
        }

        # Create alarm object.
        alarm = {}
        alarm['type'] = 'IntervalAlarmDef'
        alarm['kwargs'] = kwargs
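        # Hedged sketch (assumption): the docstring above mentions a closed
        # (two-sided) interval. Such an alarm would carry both bounds,
        # following the relational-operator convention of the one-sided
        # examples in this test; it is illustrative only and not wired in.
        two_sided_kwargs = {
            'name': 'temperature_interval_sketch',
            'stream_name': 'parsed',
            'value_id': 'temp',
            'message': 'Temperature is outside the closed range [10.0, 50.0].',
            'type': StreamAlarmType.WARNING,
            'lower_bound': 10.0,
            'lower_rel_op': '<',
            'upper_bound': 50.0,
            'upper_rel_op': '<'
        }
        del two_sided_kwargs  # keep the test namespace clean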

        raw_config = StreamConfiguration(
            stream_name='raw',
            parameter_dictionary_name='ctd_raw_param_dict',
            records_per_granule=2,
            granule_publish_rate=5)
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict',
            records_per_granule=2,
            granule_publish_rate=5,
            alarms=[alarm])

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1a-py2.7.egg",
            stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) '
        )
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        instDevice_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, instDevice_id)

        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubcli.create_stream_definition(
            name='parsed', parameter_dictionary_id=parsed_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.pubsubcli.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug('new dp_id = %s', data_product_id1)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]

        pid = self.create_logger('ctd_parsed', stream_ids[0])
        self.loggerpids.append(pid)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', data_product_id2)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id2)

        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2)

        # setup notifications for the device and parsed data product
        user_id_1 = self._create_notification(user_name='user_1',
                                              instrument_id=instDevice_id,
                                              product_id=data_product_id1)
        #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users ---------
        user_id_2 = self._create_notification(user_name='user_2',
                                              instrument_id=instDevice_id,
                                              product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                   PRED.hasStream, None, True)
        log.debug('Data product streams2 = %s', str(stream_ids))

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id2 = %s', dataset_ids[0])
        self.raw_dataset = dataset_ids[0]

        #elastic search debug
        es_indexes, _ = self.container.resource_registry.find_resources(
            restype='ElasticSearchIndex')
        log.debug('ElasticSearch indexes: %s', [i.name for i in es_indexes])
        log.debug('Bootstrap %s', CFG.bootstrap.use_es)

        def start_instrument_agent():
            self.imsclient.start_instrument_agent_instance(
                instrument_agent_instance_id=instAgentInstance_id)

        gevent.joinall([gevent.spawn(start_instrument_agent)])

        #setup a subscriber to alarm events from the device
        self._events_received = []
        self._event_count = 0
        self._samples_out_of_range = 0
        self._samples_complete = False
        self._async_sample_result = AsyncResult()

        def consume_event(*args, **kwargs):
            log.debug(
                'TestActivateInstrument received ION event: args=%s, kwargs=%s, event=%s.',
                str(args), str(kwargs), str(args[0]))
            self._events_received.append(args[0])
            self._event_count = len(self._events_received)
            self._async_sample_result.set()

        self._event_subscriber = EventSubscriber(
            event_type='StreamWarningAlarmEvent',  # or the broader StreamAlarmEvent
            callback=consume_event,
            origin=instDevice_id)
        self._event_subscriber.start()

        #cleanup
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        def stop_subscriber():
            self._event_subscriber.stop()
            self._event_subscriber = None

        self.addCleanup(stop_subscriber)

        #wait for start
        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        gate = ProcessStateGate(self.processdispatchclient.read_process,
                                inst_agent_instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            inst_agent_instance_obj.agent_process_id)

        log.debug('Instrument agent instance obj: = %s',
                  str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(
            instDevice_id,
            to_name=inst_agent_instance_obj.agent_process_id,
            process=FakeProcess())

        log.debug("test_activateInstrumentSample: got ia client %s",
                  str(self._ia_client))

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: initialize %s", str(retval))
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.INACTIVE)

        log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrument: return value from go_active %s",
                  str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.IDLE)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug(
            "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
            str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: run %s", str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.STOPPED)

        cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.IDLE)

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE)
        for i in xrange(10):
            retval = self._ia_client.execute_resource(cmd)
            log.debug("test_activateInstrumentSample: return from sample %s",
                      str(retval))

        log.debug("test_activateInstrumentSample: calling reset ")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset %s",
                  str(reply))

        self._samples_complete = True

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC Call to start_retrieve
        #--------------------------------------------------------------------------------

        replay_data = self.dataretrieverclient.retrieve(self.parsed_dataset)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        log.debug("test_activateInstrumentSample: RDT parsed: %s",
                  str(rdt.pretty_print()))
        temp_vals = rdt['temp']
        self.assertEquals(len(temp_vals), 10)
        log.debug("test_activateInstrumentSample: all temp_vals: %s",
                  temp_vals)

        #out_of_range_temp_vals = [i for i in temp_vals if i > 5]
        out_of_range_temp_vals = [i for i in temp_vals if i < 50.0]
        log.debug("test_activateInstrumentSample: Out_of_range_temp_vals: %s",
                  out_of_range_temp_vals)
        self._samples_out_of_range = len(out_of_range_temp_vals)

        # if no bad values were produced, then do not wait for an event
        if self._samples_out_of_range == 0:
            self._async_sample_result.set()

        log.debug("test_activateInstrumentSample: _events_received: %s",
                  self._events_received)
        log.debug("test_activateInstrumentSample: _event_count: %s",
                  self._event_count)

        self._async_sample_result.get(timeout=CFG.endpoint.receive.timeout)

        replay_data = self.dataretrieverclient.retrieve(self.raw_dataset)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        log.debug("RDT raw: %s", str(rdt.pretty_print()))

        raw_vals = rdt['raw']
        self.assertEquals(len(raw_vals), 10)

        log.debug("l4-ci-sa-rq-138")
        """
        Physical resource control shall be subject to policy

        Instrument management control capabilities shall be subject to policy

        The actor accessing the control capabilities must be authorized to send commands.

        note from maurice 2012-05-18: Talk to tim M to verify that this is policy.  If it is then talk with Stephen to
                                      get an example of a policy test and use that to create a test stub that will be
                                      completed when we have instrument policies.

        Tim M: The "actor", aka observatory operator, will access the instrument through ION.

        """

        #--------------------------------------------------------------------------------
        # Get the extended data product to see if it contains the granules
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(
            data_product_id=data_product_id1, user_id=user_id_1)

        def poller(extended_product):
            return len(extended_product.computed.user_notification_requests.
                       value) == 1

        poll(poller, extended_product, timeout=30)

        self._check_computed_attributes_of_extended_product(
            expected_data_product_id=data_product_id1,
            extended_data_product=extended_product)

        #--------------------------------------------------------------------------------
        #put some events into the eventsdb to test - this should set the comms and data status to WARNING
        #--------------------------------------------------------------------------------

        t = get_ion_ts()
        self.event_publisher.publish_event(ts_created=t,
                                           event_type='DeviceStatusEvent',
                                           origin=instDevice_id,
                                           state=DeviceStatusType.OUT_OF_RANGE,
                                           values=[200])
        self.event_publisher.publish_event(
            ts_created=t,
            event_type='DeviceCommsEvent',
            origin=instDevice_id,
            state=DeviceCommsType.DATA_DELIVERY_INTERRUPTION,
            lapse_interval_seconds=20)

        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_1)
        self._check_computed_attributes_of_extended_instrument(
            expected_instrument_device_id=instDevice_id,
            extended_instrument=extended_instrument)

        #--------------------------------------------------------------------------------
        # For the second user, check the extended data product and the extended instrument
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(
            data_product_id=data_product_id2, user_id=user_id_2)
        self._check_computed_attributes_of_extended_product(
            expected_data_product_id=data_product_id2,
            extended_data_product=extended_product)

        #---------- Put some events into the eventsdb to test - this should set the comms and data status to WARNING  ---------

        t = get_ion_ts()
        self.event_publisher.publish_event(ts_created=t,
                                           event_type='DeviceStatusEvent',
                                           origin=instDevice_id,
                                           state=DeviceStatusType.OUT_OF_RANGE,
                                           values=[200])
        self.event_publisher.publish_event(
            ts_created=t,
            event_type='DeviceCommsEvent',
            origin=instDevice_id,
            state=DeviceCommsType.DATA_DELIVERY_INTERRUPTION,
            lapse_interval_seconds=20)

        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_2)
        self._check_computed_attributes_of_extended_instrument(
            expected_instrument_device_id=instDevice_id,
            extended_instrument=extended_instrument)

        #--------------------------------------------------------------------------------
        # Deactivate loggers
        #--------------------------------------------------------------------------------

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

        self.dpclient.delete_data_product(data_product_id1)
        self.dpclient.delete_data_product(data_product_id2)
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(handler_module=self.DVR_CONFIG['dvr_mod'],
            handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'slocum_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
        dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
        dset.dataset_description.parameters['date_pattern'] = '%Y %j'
        dset.dataset_description.parameters['date_extraction_pattern'] = r'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = [
            'c_wpt_y_lmc',
            'sci_water_cond',
            'm_y_lmc',
            'u_hd_fin_ap_inflection_holdoff',
            'sci_m_present_time',
            'm_leakdetect_voltage_forward',
            'sci_bb3slo_b660_scaled',
            'c_science_send_all',
            'm_gps_status',
            'm_water_vx',
            'm_water_vy',
            'c_heading',
            'sci_fl3slo_chlor_units',
            'u_hd_fin_ap_gain',
            'm_vacuum',
            'u_min_water_depth',
            'm_gps_lat',
            'm_veh_temp',
            'f_fin_offset',
            'u_hd_fin_ap_hardover_holdoff',
            'c_alt_time',
            'm_present_time',
            'm_heading',
            'sci_bb3slo_b532_scaled',
            'sci_fl3slo_cdom_units',
            'm_fin',
            'x_cycle_overrun_in_ms',
            'sci_water_pressure',
            'u_hd_fin_ap_igain',
            'sci_fl3slo_phyco_units',
            'm_battpos',
            'sci_bb3slo_b470_scaled',
            'm_lat',
            'm_gps_lon',
            'sci_ctd41cp_timestamp',
            'm_pressure',
            'c_wpt_x_lmc',
            'c_ballast_pumped',
            'x_lmc_xy_source',
            'm_lon',
            'm_avg_speed',
            'sci_water_temp',
            'u_pitch_ap_gain',
            'm_roll',
            'm_tot_num_inflections',
            'm_x_lmc',
            'u_pitch_ap_deadband',
            'm_final_water_vy',
            'm_final_water_vx',
            'm_water_depth',
            'm_leakdetect_voltage',
            'u_pitch_max_delta_battpos',
            'm_coulomb_amphr',
            'm_pitch', ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='slocum_model')
        #        dsrc_model.model = 'SLOCUM'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        #create temp streamdef so the data product can create the stream
        pc_list = []
        for pc_k, pc in self._create_parameter_dictionary().iteritems():
            pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('slocum_param_dict', pc_list)

        streamdef_id = pubsub_cli.create_stream_definition(name="slocum_stream_def", description="stream def for slocum testing", parameter_dictionary_id=pdict_id)

        #        dpms_cli.create_data_product()

        # Generate the data product and associate it to the ExternalDataset

        tdom, sdom = time_series_domain()
        tdom, sdom = tdom.dump(), sdom.dump()

        dprod = IonObject(RT.DataProduct,
            name='slocum_parsed_product',
            description='parsed slocum product',
            temporal_domain=tdom,
            spatial_domain=sdom)

        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
            stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='slocum', stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': streamdef_id,
            'external_dataset_res': dset,
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 20,
            }
Example #57
    def test_prepare_resource_support(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """

        #stuff we control
        instrument_agent_instance_id, _ = self.RR.create(
            any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        platform_agent_instance_id, _ = self.RR.create(
            any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel))

        instrument_device2_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        instrument_device3_id, _ = self.RR.create(any_old(RT.InstrumentDevice))

        platform_device2_id, _ = self.RR.create(any_old(RT.PlatformDevice))
        sensor_device2_id, _ = self.RR.create(any_old(RT.SensorDevice))

        #stuff we associate to
        data_producer_id, _ = self.RR.create(any_old(RT.DataProducer))
        org_id, _ = self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id #is only a target

        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel,
                                   instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id,
                                   PRED.hasAgentDefinition,
                                   instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel,
                                   instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance,
                                   instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer,
                                   data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice,
                                   sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource,
                                   instrument_device_id)

        self.RR.create_association(instrument_device2_id, PRED.hasModel,
                                   instrument_model_id)
        self.RR.create_association(org_id, PRED.hasResource,
                                   instrument_device2_id)

        instrument_model_id  #is only a target

        platform_agent_instance_id  #is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel,
                                   platform_model_id)
        self.RR.create_association(platform_agent_instance_id,
                                   PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel,
                                   platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance,
                                   platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice,
                                   instrument_device_id)

        self.RR.create_association(platform_device2_id, PRED.hasModel,
                                   platform_model_id)
        self.RR.create_association(platform_device2_id, PRED.hasDevice,
                                   instrument_device2_id)

        platform_model_id  #is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel,
                                   sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice,
                                   instrument_device_id)

        self.RR.create_association(sensor_device2_id, PRED.hasModel,
                                   sensor_model_id)
        self.RR.create_association(sensor_device2_id, PRED.hasDevice,
                                   instrument_device2_id)

        sensor_model_id  #is only a target

        #set lcstate - used for testing prepare - not setting all to DEVELOP, only some
        self.RR.execute_lifecycle_transition(instrument_agent_id, LCE.DEVELOP)
        self.RR.execute_lifecycle_transition(instrument_device_id, LCE.DEVELOP)
        self.RR.execute_lifecycle_transition(instrument_device2_id,
                                             LCE.DEVELOP)
        self.RR.execute_lifecycle_transition(platform_device_id, LCE.DEVELOP)
        self.RR.execute_lifecycle_transition(platform_device2_id, LCE.DEVELOP)
        self.RR.execute_lifecycle_transition(platform_agent_id, LCE.DEVELOP)

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           processing_level_code='Parsed_Canonical',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(
            name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug('new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id,
                                      data_product_id=data_product_id1)

        def addInstOwner(inst_id, subject):

            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)

            self.RR.create_association(inst_id, PRED.hasOwner, user_id)

        #Testing multiple instrument owners
        addInstOwner(
            instrument_device_id,
            "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(
            instrument_device_id,
            "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        def ion_object_encoder(obj):
            return obj.__dict__

        #First call to create
        instrument_data = self.IMS.prepare_instrument_device_support()

        #print simplejson.dumps(instrument_data, default=ion_object_encoder, indent=2)

        self.assertEqual(instrument_data._id, '')
        self.assertEqual(instrument_data.type_,
                         OT.InstrumentDevicePrepareSupport)
        self.assertEqual(
            len(instrument_data.associations['InstrumentModel'].resources), 1)
        self.assertEqual(
            instrument_data.associations['InstrumentModel'].resources[0]._id,
            instrument_model_id)
        self.assertEqual(
            len(instrument_data.associations['InstrumentAgentInstance'].
                resources), 1)
        self.assertEqual(
            instrument_data.associations['InstrumentAgentInstance'].
            resources[0]._id, instrument_agent_instance_id)
        self.assertEqual(
            len(instrument_data.associations['InstrumentModel'].
                associated_resources), 0)
        self.assertEqual(
            len(instrument_data.associations['InstrumentAgentInstance'].
                associated_resources), 0)
        self.assertEqual(
            len(instrument_data.associations['SensorDevice'].resources), 0)

        #Next call to update
        instrument_data = self.IMS.prepare_instrument_device_support(
            instrument_device_id)

        #print 'Update results'
        #print simplejson.dumps(instrument_data, default=ion_object_encoder, indent=2)

        self.assertEqual(instrument_data._id, instrument_device_id)
        self.assertEqual(instrument_data.type_,
                         OT.InstrumentDevicePrepareSupport)
        self.assertEqual(
            len(instrument_data.associations['InstrumentModel'].resources), 1)
        self.assertEqual(
            instrument_data.associations['InstrumentModel'].resources[0]._id,
            instrument_model_id)
        self.assertEqual(
            len(instrument_data.associations['InstrumentAgentInstance'].
                resources), 1)
        self.assertEqual(
            instrument_data.associations['InstrumentAgentInstance'].
            resources[0]._id, instrument_agent_instance_id)
        self.assertEqual(
            len(instrument_data.associations['InstrumentModel'].
                associated_resources), 1)
        self.assertEqual(
            instrument_data.associations['InstrumentModel'].
            associated_resources[0].s, instrument_device_id)
        self.assertEqual(
            instrument_data.associations['InstrumentModel'].
            associated_resources[0].o, instrument_model_id)
        self.assertEqual(
            len(instrument_data.associations['InstrumentAgentInstance'].
                associated_resources), 1)
        self.assertEqual(
            instrument_data.associations['InstrumentAgentInstance'].
            associated_resources[0].o, instrument_agent_instance_id)
        self.assertEqual(
            instrument_data.associations['InstrumentAgentInstance'].
            associated_resources[0].s, instrument_device_id)
        self.assertEqual(
            len(instrument_data.associations['SensorDevice'].resources), 1)
        self.assertEqual(
            instrument_data.associations['SensorDevice'].resources[0]._id,
            sensor_device_id)
        self.assertEqual(
            len(instrument_data.associations['SensorDevice'].
                associated_resources), 1)
        self.assertEqual(
            instrument_data.associations['SensorDevice'].
            associated_resources[0].o, instrument_device_id)
        self.assertEqual(
            instrument_data.associations['SensorDevice'].
            associated_resources[0].s, sensor_device_id)
        self.assertEqual(
            instrument_data.associations['InstrumentModel'].assign_request.
            request_parameters['instrument_device_id'], instrument_device_id)

        #test prepare for create of instrument agent instance
        instrument_agent_data = self.IMS.prepare_instrument_agent_instance_support(
        )

        #print 'Update results'
        #print simplejson.dumps(instrument_agent_data, default=ion_object_encoder, indent=2)

        self.assertEqual(instrument_agent_data._id, '')
        self.assertEqual(instrument_agent_data.type_,
                         OT.InstrumentAgentInstancePrepareSupport)
        self.assertEqual(
            len(instrument_agent_data.associations['InstrumentDevice'].
                resources), 2)
        self.assertEqual(
            len(instrument_agent_data.associations['InstrumentAgent'].resources
                ), 1)
        self.assertEqual(
            instrument_agent_data.associations['InstrumentAgent'].resources[0].
            _id, instrument_agent_id)
        self.assertEqual(
            len(instrument_agent_data.associations['InstrumentDevice'].
                associated_resources), 0)
        self.assertEqual(
            len(instrument_agent_data.associations['InstrumentAgent'].
                associated_resources), 0)

        #test prepare for update of instrument agent instance to see if it is associated with the instrument that was created
        instrument_agent_data = self.IMS.prepare_instrument_agent_instance_support(
            instrument_agent_instance_id=instrument_agent_instance_id)

        #print 'Update results'
        #print simplejson.dumps(instrument_agent_data, default=ion_object_encoder, indent=2)

        self.assertEqual(instrument_agent_data._id,
                         instrument_agent_instance_id)
        self.assertEqual(instrument_agent_data.type_,
                         OT.InstrumentAgentInstancePrepareSupport)
        self.assertEqual(
            len(instrument_agent_data.associations['InstrumentDevice'].
                resources), 3)
        self.assertEqual(
            len(instrument_agent_data.associations['InstrumentAgent'].resources
                ), 1)
        self.assertEqual(
            instrument_agent_data.associations['InstrumentAgent'].resources[0].
            _id, instrument_agent_id)
        self.assertEqual(
            len(instrument_agent_data.associations['InstrumentDevice'].
                associated_resources), 1)
        self.assertEqual(
            instrument_agent_data.associations['InstrumentDevice'].
            associated_resources[0].s, instrument_device_id)
        self.assertEqual(
            instrument_agent_data.associations['InstrumentDevice'].
            associated_resources[0].o, instrument_agent_instance_id)
        self.assertEqual(
            len(instrument_agent_data.associations['InstrumentAgent'].
                associated_resources), 1)
        self.assertEqual(
            instrument_agent_data.associations['InstrumentAgent'].
            associated_resources[0].o, instrument_agent_id)
        self.assertEqual(
            instrument_agent_data.associations['InstrumentAgent'].
            associated_resources[0].s, instrument_agent_instance_id)
        self.assertEqual(
            instrument_agent_data.associations['InstrumentAgent'].
            assign_request.request_parameters['instrument_agent_instance_id'],
            instrument_agent_instance_id)

        #test prepare for update of data product to see if it is associated with the instrument that was created
        data_product_data = self.DP.prepare_data_product_support(
            data_product_id1)

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

        self.assertEqual(data_product_data._id, data_product_id1)
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].resources),
            1)

        self.assertEqual(
            len(data_product_data.associations['Dataset'].resources), 0)

        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].
                associated_resources), 1)
        self.assertEqual(
            data_product_data.associations['StreamDefinition'].
            associated_resources[0].s, data_product_id1)

        self.assertEqual(
            len(data_product_data.associations['Dataset'].associated_resources
                ), 0)

        self.assertEqual(
            len(data_product_data.
                associations['InstrumentDeviceHasOutputProduct'].resources), 3)

        self.assertEqual(
            len(data_product_data.associations[
                'InstrumentDeviceHasOutputProduct'].associated_resources), 1)
        self.assertEqual(
            data_product_data.associations['InstrumentDeviceHasOutputProduct'].
            associated_resources[0].s, instrument_device_id)
        self.assertEqual(
            data_product_data.associations['InstrumentDeviceHasOutputProduct'].
            associated_resources[0].o, data_product_id1)

        self.assertEqual(
            len(data_product_data.associations['PlatformDevice'].resources), 2)

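        # With no id, prepare_platform_device_support returns a blank prepare
        # object: candidates are listed but no associations exist and _id is empty.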
        platform_data = self.IMS.prepare_platform_device_support()

        #print simplejson.dumps(platform_data, default=ion_object_encoder, indent=2)

        self.assertEqual(platform_data._id, '')
        self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport)
        self.assertEqual(
            len(platform_data.associations['PlatformModel'].resources), 1)
        self.assertEqual(
            platform_data.associations['PlatformModel'].resources[0]._id,
            platform_model_id)
        self.assertEqual(
            len(platform_data.associations['PlatformAgentInstance'].resources),
            1)
        self.assertEqual(
            platform_data.associations['PlatformAgentInstance'].resources[0].
            _id, platform_agent_instance_id)
        self.assertEqual(
            len(platform_data.associations['PlatformModel'].
                associated_resources), 0)
        self.assertEqual(
            len(platform_data.associations['PlatformAgentInstance'].
                associated_resources), 0)
        self.assertEqual(
            len(platform_data.associations['InstrumentDevice'].resources), 1)

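        # Prepare again with a real platform_device_id: the same candidates come
        # back, and the existing hasModel/hasAgentInstance associations are populated.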
        platform_data = self.IMS.prepare_platform_device_support(
            platform_device_id)

        #print simplejson.dumps(platform_data, default=ion_object_encoder, indent=2)

        self.assertEqual(platform_data._id, platform_device_id)
        self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport)
        self.assertEqual(
            len(platform_data.associations['PlatformModel'].resources), 1)
        self.assertEqual(
            platform_data.associations['PlatformModel'].resources[0]._id,
            platform_model_id)
        self.assertEqual(
            len(platform_data.associations['PlatformAgentInstance'].resources),
            1)
        self.assertEqual(
            platform_data.associations['PlatformAgentInstance'].resources[0].
            _id, platform_agent_instance_id)
        self.assertEqual(
            len(platform_data.associations['PlatformModel'].
                associated_resources), 1)
        self.assertEqual(
            platform_data.associations['PlatformModel'].
            associated_resources[0].s, platform_device_id)
        self.assertEqual(
            platform_data.associations['PlatformModel'].
            associated_resources[0].o, platform_model_id)
        self.assertEqual(
            len(platform_data.associations['PlatformAgentInstance'].
                associated_resources), 1)
        self.assertEqual(
            platform_data.associations['PlatformAgentInstance'].
            associated_resources[0].o, platform_agent_instance_id)
        self.assertEqual(
            platform_data.associations['PlatformAgentInstance'].
            associated_resources[0].s, platform_device_id)
        self.assertEqual(
            len(platform_data.associations['InstrumentDevice'].resources), 2)
        #self.assertEqual(len(platform_data.associations['InstrumentDevice'].associated_resources), 1)
        #self.assertEqual(platform_data.associations['InstrumentDevice'].associated_resources[0].s, platform_device_id)
        #self.assertEqual(platform_data.associations['InstrumentDevice'].associated_resources[0].o, instrument_device_id)
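        # assign_request describes the service call (and its parameters) a client
        # would use to create the association.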
        self.assertEqual(
            platform_data.associations['PlatformModel'].assign_request.
            request_parameters['platform_device_id'], platform_device_id)

        # cleanup
        c = DotDict()
        c.resource_registry = self.RR
        self.RR2.pluck(instrument_agent_id)
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(instrument_device_id)
        self.RR2.pluck(platform_agent_id)
        self.RR2.pluck(sensor_device_id)
        self.RR2.pluck(sensor_device2_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_device(instrument_device2_id)
        self.IMS.force_delete_platform_agent_instance(
            platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_device(platform_device2_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_device(sensor_device2_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #delete the resources we associated to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)
Example #58
    def test_resources_associations_extensions(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """

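        # any_old is a test helper (assumed to build a minimally-populated
        # IonObject of the given resource type, here used as registry fodder).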
        #stuff we control
        instrument_agent_instance_id, _ = self.RR.create(
            any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        instrument_site_id, _ = self.RR.create(any_old(RT.InstrumentSite))
        platform_agent_instance_id, _ = self.RR.create(
            any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent))
        platform_site_id, _ = self.RR.create(any_old(RT.PlatformSite))
        platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _ = self.RR.create(any_old(RT.DataProducer))
        org_id, _ = self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id is only a target

        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel,
                                   instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id,
                                   PRED.hasAgentDefinition,
                                   instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel,
                                   instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance,
                                   instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer,
                                   data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice,
                                   sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource,
                                   instrument_device_id)

        #instrument_model_id is only a target

        #platform_agent_instance_id is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel,
                                   platform_model_id)
        self.RR.create_association(platform_agent_instance_id,
                                   PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel,
                                   platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance,
                                   platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice,
                                   instrument_device_id)

        self.RR.create_association(instrument_site_id, PRED.hasDevice,
                                   instrument_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice,
                                   platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasSite,
                                   instrument_site_id)

        #platform_model_id is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel,
                                   sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice,
                                   instrument_device_id)

        #sensor_model_id is only a target

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           processing_level_code='Parsed_Canonical',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(
            name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug('new dp_id = %s', data_product_id1)

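        # assign_data_product is assumed to wire the device to the product via a
        # hasOutputProduct association, which the extended instrument below reflects.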
        self.DAMS.assign_data_product(input_resource_id=instrument_device_id,
                                      data_product_id=data_product_id1)

        def addInstOwner(inst_id, subject):
            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            self.IDS.create_user_info(user_id, user_info_obj)

            self.RR.create_association(inst_id, PRED.hasOwner, user_id)

        #Testing multiple instrument owners
        addInstOwner(
            instrument_device_id,
            "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(
            instrument_device_id,
            "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

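        # The extended instrument bundles the device with its owners, model,
        # agent, and computed attributes in a single read.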
        extended_instrument = self.IMS.get_instrument_device_extension(
            instrument_device_id)

        self.assertEqual(instrument_device_id, extended_instrument._id)
        self.assertEqual(len(extended_instrument.owners), 2)
        self.assertEqual(extended_instrument.instrument_model._id,
                         instrument_model_id)

        # Lifecycle
        self.assertEquals(len(extended_instrument.lcstate_transitions), 5)
        self.assertEquals(
            set(extended_instrument.lcstate_transitions.keys()),
            set(['develop', 'deploy', 'retire', 'plan', 'integrate']))
        self.assertEquals(len(extended_instrument.availability_transitions), 2)
        self.assertEquals(
            set(extended_instrument.availability_transitions.keys()),
            set(['enable', 'announce']))

        # Verify that computed attributes exist for the extended instrument
        self.assertIsInstance(
            extended_instrument.computed.last_data_received_datetime,
            ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.uptime,
                              ComputedStringValue)

        self.assertIsInstance(
            extended_instrument.computed.power_status_roll_up,
            ComputedIntValue)
        self.assertIsInstance(
            extended_instrument.computed.communications_status_roll_up,
            ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.data_status_roll_up,
                              ComputedIntValue)
        self.assertIsInstance(
            extended_instrument.computed.location_status_roll_up,
            ComputedIntValue)

        log.debug("extended_instrument.computed: %s",
                  extended_instrument.computed)

        #check model
        inst_model_obj = self.RR.read(instrument_model_id)
        self.assertEqual(inst_model_obj.name,
                         extended_instrument.instrument_model.name)

        #check agent instance
        inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id)
        self.assertEqual(inst_agent_instance_obj.name,
                         extended_instrument.agent_instance.name)

        #check agent
        inst_agent_obj = self.RR.read(instrument_agent_id)
        #compound assocs return lists of lists, so check the first element
        self.assertEqual(inst_agent_obj.name,
                         extended_instrument.instrument_agent.name)

        #check platform device
        plat_device_obj = self.RR.read(platform_device_id)
        self.assertEqual(plat_device_obj.name,
                         extended_instrument.platform_device.name)

        extended_platform = self.IMS.get_platform_device_extension(
            platform_device_id)

        self.assertEqual(1, len(extended_platform.portals))
        self.assertEqual(1, len(extended_platform.portal_instruments))
        #self.assertEqual(1, len(extended_platform.computed.portal_status.value)) # no agent started so NO statuses reported
        self.assertEqual(1, len(extended_platform.instrument_devices))
        self.assertEqual(instrument_device_id,
                         extended_platform.instrument_devices[0]._id)
        self.assertEqual(1, len(extended_platform.instrument_models))
        self.assertEqual(instrument_model_id,
                         extended_platform.instrument_models[0]._id)
        self.assertEquals(extended_platform.platform_agent._id,
                          platform_agent_id)

        self.assertEquals(len(extended_platform.lcstate_transitions), 5)
        self.assertEquals(
            set(extended_platform.lcstate_transitions.keys()),
            set(['develop', 'deploy', 'retire', 'plan', 'integrate']))
        self.assertEquals(len(extended_platform.availability_transitions), 2)
        self.assertEquals(
            set(extended_platform.availability_transitions.keys()),
            set(['enable', 'announce']))

        #check sensor devices
        self.assertEqual(1, len(extended_instrument.sensor_devices))

        #check data_product_parameters_set
        self.assertEqual(
            ComputedValueAvailability.PROVIDED,
            extended_instrument.computed.data_product_parameters_set.status)
        self.assertTrue('Parsed_Canonical' in extended_instrument.computed.
                        data_product_parameters_set.value)
        # the ctd parameters should include 'temp'
        self.assertTrue('temp' in extended_instrument.computed.
                        data_product_parameters_set.value['Parsed_Canonical'])

        #none of these will work because there is no agent
        #        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
        #                         extended_instrument.computed.firmware_version.status)
        #        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
        #                         extended_instrument.computed.operational_state.status)
        #        self.assertEqual(ComputedValueAvailability.PROVIDED,
        #                         extended_instrument.computed.power_status_roll_up.status)
        #        self.assertEqual(ComputedValueAvailability.PROVIDED,
        #                         extended_instrument.computed.communications_status_roll_up.status)
        #        self.assertEqual(ComputedValueAvailability.PROVIDED,
        #                         extended_instrument.computed.data_status_roll_up.status)
        #        self.assertEqual(DeviceStatusType.STATUS_OK,
        #                        extended_instrument.computed.data_status_roll_up.value)
        #        self.assertEqual(ComputedValueAvailability.PROVIDED,
        #                         extended_instrument.computed.location_status_roll_up.status)

        #        self.assertEqual(ComputedValueAvailability.PROVIDED,
        #                         extended_instrument.computed.recent_events.status)
        #        self.assertEqual([], extended_instrument.computed.recent_events.value)

        # cleanup
        c = DotDict()
        c.resource_registry = self.RR
        self.RR2.pluck(instrument_agent_id)
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(instrument_device_id)
        self.RR2.pluck(platform_agent_id)
        self.RR2.pluck(sensor_device_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_platform_agent_instance(
            platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.OMS.force_delete_instrument_site(instrument_site_id)
        self.OMS.force_delete_platform_site(platform_site_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #delete the resources we associated to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)
    def test_pydap(self):
        if not CFG.get_safe('bootstrap.use_pydap', False):
            raise unittest.SkipTest('PyDAP is off (bootstrap.use_pydap)')
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition(
            'example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='example')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(
            dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product,
                        data_product_id)

        self.data_product_management.activate_data_product_persistence(
            data_product_id)
        self.addCleanup(
            self.data_product_management.suspend_data_product_persistence,
            data_product_id)

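        # find_objects returns ([object_ids], [associations]); [0][0] takes the
        # first dataset id.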
        dataset_id = self.resource_registry.find_objects(data_product_id,
                                                         PRED.hasDataset,
                                                         id_only=True)[0][0]
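        # DatasetMonitor (test helper) sets its event when new data arrives in
        # the dataset, so the test can block until persistence completes.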
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt, 10)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.event.wait(10))

        gevent.sleep(1)  # Yield to other greenlets; had an issue with connectivity

        pydap_host = CFG.get_safe('server.pydap.host', 'localhost')
        pydap_port = CFG.get_safe('server.pydap.port', 8001)
        url = 'http://%s:%s/%s' % (pydap_host, pydap_port, dataset_id)

        # Run the request three times to verify the cache doesn't corrupt the requests/responses
        for i in xrange(3):
            ds = open_url(url)
            np.testing.assert_array_equal(ds['time'][:], np.arange(10))
            untested = []
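            # pydap nests each variable under its own name (hence ds[k][k]);
            # compare every parameter type against the values we published.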
            for k, v in rdt.iteritems():
                if k == rdt.temporal_parameter:
                    continue
                context = rdt.context(k)
                if isinstance(context.param_type, QuantityType):
                    np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
                elif isinstance(context.param_type, ArrayType):
                    if context.param_type.inner_encoding is None:
                        values = np.empty(rdt[k].shape, dtype='O')
                        for i, obj in enumerate(rdt[k]):
                            values[i] = str(obj)
                        np.testing.assert_array_equal(ds[k][k][:][0], values)
                    elif len(rdt[k].shape) > 1:
                        values = np.empty(rdt[k].shape[0], dtype='O')
                        for i in xrange(rdt[k].shape[0]):
                            values[i] = ','.join(map(str, rdt[k][i].tolist()))
                        np.testing.assert_array_equal(ds[k][k][:][0], values)
                elif isinstance(context.param_type, ConstantType):
                    np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
                elif isinstance(context.param_type, CategoryType):
                    np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
                else:
                    untested.append('%s (%s)' % (k, context.param_type))
            if untested:
                raise AssertionError('Untested parameters: %s' % untested)
Example #60
    def test_create_read_delete_event_process(self):
        """
        Test that the CRUD methods for the event processes work correctly
        """

        #---------------------------------------------------------------------------------------------
        # Create a process definition
        #---------------------------------------------------------------------------------------------

        # Create
        module = 'ion.processes.data.transforms.event_alert_transform'
        class_name = 'EventAlertTransform'
        procdef_id = self.event_management.create_event_process_definition(version='ver_1',
            module=module,
            class_name=class_name,
            uri='http://hare.com',
            arguments=['arg1', 'arg2'],
            event_types=['ExampleDetectableEvent', 'type_2'],
            sub_types=['sub_type_1'])

        # Read
        read_process_def = self.event_management.read_event_process_definition(procdef_id)
        self.assertEquals(read_process_def.arguments, ['arg1', 'arg2'])

        #---------------------------------------------------------------------------------------------
        # Use the process definition to create a process
        #---------------------------------------------------------------------------------------------

        # Create a stream
        param_dict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)

        stream_def_id = self.pubsub.create_stream_definition('cond_stream_def', parameter_dictionary_id=param_dict_id)

        tdom, sdom = time_series_domain()
        tdom, sdom = tdom.dump(), sdom.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp',
            temporal_domain = tdom,
            spatial_domain = sdom)

        # Create a data product
        data_product_id = self.data_product_management.create_data_product(data_product=dp_obj, stream_definition_id=stream_def_id)

        output_products = {}
        output_products['conductivity'] = data_product_id
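        # The key ('conductivity') names the output; the event process publishes
        # to the stream behind this data product.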

        # Create an event process
        event_process_id = self.event_management.create_event_process(  process_definition_id=procdef_id,
                                                                        event_types=['ExampleDetectableEvent','DetectionEvent'],
                                                                        sub_types=['s1', 's2'],
                                                                        origins=['or_1', 'or_2'],
                                                                        origin_types=['or_t1', 'or_t2'],
                                                                        out_data_products = output_products)
        self.addCleanup(self.process_dispatcher.cancel_process, event_process_id)

        #---------------------------------------------------------------------------------------------
        # Read the event process object and make assertions
        #---------------------------------------------------------------------------------------------
        event_process_obj = self.event_management.read_event_process(event_process_id=event_process_id)

        # Get the stream associated with the data product for the sake of making assertions
        stream_ids, _ = self.rrc.find_objects(data_product_id, PRED.hasStream, id_only=True)
        stream_id = stream_ids[0]

        # Assertions!
        self.assertEquals(event_process_obj.detail.output_streams['conductivity'], stream_id)
        self.assertEquals(event_process_obj.detail.event_types, ['ExampleDetectableEvent', 'DetectionEvent'])
        self.assertEquals(event_process_obj.detail.sub_types, ['s1', 's2'])
        self.assertEquals(event_process_obj.detail.origins, ['or_1', 'or_2'])
        self.assertEquals(event_process_obj.detail.origin_types, ['or_t1', 'or_t2'])