Example #1
    def test_logger(self):
        data_product_id = self.make_ctd_data_product()
        # Clone the data product so we have an output
        clone_id = self.clone_data_product(data_product_id)

        data_process_id = self.create_data_process_logger(
            data_product_id, clone_id, {'x': 'temp'})

        dataset_monitor = DatasetMonitor(data_product_id=data_product_id)
        self.addCleanup(dataset_monitor.stop)

        # Put some data into the data product
        rdt = self.ph.rdt_for_data_product(data_product_id)
        rdt['time'] = np.arange(40)
        rdt['temp'] = np.arange(40)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())

        # Watch the output
        dataset_monitor = DatasetMonitor(data_product_id=clone_id)
        self.addCleanup(dataset_monitor.stop)
        # Run the replay
        self.data_process_management.activate_data_process(data_process_id)

        # Make sure data came out
        self.assertTrue(dataset_monitor.wait())
    def test_coefficient_compatibility(self):
        data_product_id = self.create_data_product(
            name='Calibration Coefficient Test Data product',
            stream_def_id=self.stream_def_id)

        self.data_product_management.activate_data_product_persistence(
            data_product_id)
        self.addCleanup(
            self.data_product_management.suspend_data_product_persistence,
            data_product_id)

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = np.arange(10)
        rdt['temp'] = [10] * 10
        rdt['cc_coefficient'] = [2] * 10
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(
            data_product_id)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())

        rdt2 = RecordDictionaryTool.load_from_granule(
            self.data_retriever.retrieve(dataset_id))
        np.testing.assert_array_equal(rdt2['offset'], [12] * 10)
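
The 'offset' values asserted above are evidently the published temp plus the cc_coefficient calibration constant. A quick check of that arithmetic, assuming the derived 'offset' parameter is defined as the plain sum:

    import numpy as np

    temp = np.array([10] * 10)
    cc_coefficient = np.array([2] * 10)
    offset = temp + cc_coefficient  # assumed definition of the derived 'offset' parameter
    np.testing.assert_array_equal(offset, [12] * 10)
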
Example #4
    def test_append_parameter(self):
        # Make a CTDBP Data Product
        data_product_id = self.make_ctd_data_product()
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(
            data_product_id)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        # Throw some data in it
        rdt = self.ph.rdt_for_data_product(data_product_id)
        rdt['time'] = np.arange(30)
        rdt['temp'] = np.arange(30)
        rdt['pressure'] = np.arange(30)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())
        dataset_monitor.event.clear()

        # Grab the egg
        egg_url = self.egg_url
        egg_path = TransformWorker.download_egg(egg_url)
        import pkg_resources
        pkg_resources.working_set.add_entry(egg_path)
        self.addCleanup(os.remove, egg_path)

        # Make a parameter function
        owner = 'ion_example.add_arrays'
        func = 'add_arrays'
        arglist = ['a', 'b']
        pf = ParameterFunction(name='add_arrays',
                               function_type=PFT.PYTHON,
                               owner=owner,
                               function=func,
                               args=arglist)
        pfunc_id = self.dataset_management.create_parameter_function(pf)
        self.addCleanup(self.dataset_management.delete_parameter_function,
                        pfunc_id)

        # Make a context (instance of the function)
        context = ParameterContext(name='array_sum',
                                   units="1",
                                   fill_value="-9999",
                                   parameter_function_id=pfunc_id,
                                   parameter_type="function",
                                   value_encoding="float32",
                                   display_name="Array Summation",
                                   parameter_function_map={
                                       'a': 'temp',
                                       'b': 'pressure'
                                   })
        #pfunc = DatasetManagementService.get_coverage_function(pf)
        #pfunc.param_map = {'a':'temp', 'b':'pressure'}
        #ctxt = ParameterContext('array_sum', param_type=ParameterFunctionType(pfunc))
        #ctxt_dump = ctxt.dump()
        #ctxt_id = self.dataset_management.create_parameter_context('array_sum', ctxt_dump)
        ctxt_id = self.dataset_management.create_parameter(context)
        self.dataset_management.add_parameter_to_dataset(ctxt_id, dataset_id)

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['array_sum'], np.arange(0, 60, 2))
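
For reference, the function shipped in the ion_example egg is a plain element-wise sum; test_add_parameter_function later on this page asserts this exact source via inspect_data_process_definition:

    import numpy as np

    # Source of ion_example.add_arrays, as asserted by inspect_data_process_definition:
    def add_arrays(a, b):
        return a+b

    # With 'a' mapped to temp and 'b' to pressure (both np.arange(30)),
    # the element-wise sum is the even numbers 0..58, which is exactly
    # what the granule assertion above checks:
    np.testing.assert_array_equal(add_arrays(np.arange(30), np.arange(30)),
                                  np.arange(0, 60, 2))
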
    def check_tempsf_instrument_data_product(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing: return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['temperature'] = [[ 25.3884, 26.9384, 24.3394, 23.3401, 22.9832,
            29.4434, 26.9873, 15.2883, 16.3374, 14.5883, 15.7253, 18.4383,
            15.3488, 17.2993, 10.2111, 11.5993, 10.9345, 9.4444, 9.9876,
            10.9834, 11.0098, 5.3456, 4.2994, 4.3009]]

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['temperature'], [[
            25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
            15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
            10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
            5.3456, 4.2994, 4.3009]])
        return passing
Example #6
    def test_instrument_simple(self):
        instrument_model_id = self.create_instrument_model()
        instrument_agent_id = self.create_instrument_agent(instrument_model_id)
        instrument_device_id = self.create_instrument_device(instrument_model_id)
        instrument_agent_instance_id = self.create_instrument_agent_instance(instrument_agent_id, instrument_device_id)

        raw_dp_id, parsed_dp_id = self.create_instrument_data_products(instrument_device_id)

        self.start_instrument_agent_instance(instrument_agent_instance_id)

        agent_process_id = self.poll_instrument_agent_instance(instrument_agent_instance_id, instrument_device_id)

        agent_client = ResourceAgentClient(instrument_device_id,
                                              to_name=agent_process_id,
                                              process=FakeProcess())

        self.agent_state_transition(agent_client, ResourceAgentEvent.INITIALIZE, ResourceAgentState.INACTIVE)
        self.agent_state_transition(agent_client, ResourceAgentEvent.GO_ACTIVE, ResourceAgentState.IDLE)
        self.agent_state_transition(agent_client, ResourceAgentEvent.RUN, ResourceAgentState.COMMAND)

        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(parsed_dp_id)
        monitor = DatasetMonitor(dataset_id=dataset_id)

        for i in xrange(10):
            agent_client.execute_resource(AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE))
            if not monitor.wait():
                raise AssertionError('Failed on the %ith granule' % i)
            monitor.reset()

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(dataset_id))
        self.assertEquals(len(rdt), 10)
    def test_instrument_simple(self):
        instrument_model_id = self.create_instrument_model()
        instrument_agent_id = self.create_instrument_agent(instrument_model_id)
        instrument_device_id = self.create_instrument_device(instrument_model_id)
        instrument_agent_instance_id = self.create_instrument_agent_instance(instrument_agent_id, instrument_device_id)

        raw_dp_id, parsed_dp_id = self.create_instrument_data_products(instrument_device_id)

        self.start_instrument_agent_instance(instrument_agent_instance_id)

        agent_process_id = self.poll_instrument_agent_instance(instrument_agent_instance_id, instrument_device_id)

        agent_client = ResourceAgentClient(instrument_device_id,
                                              to_name=agent_process_id,
                                              process=FakeProcess())

        self.agent_state_transition(agent_client, ResourceAgentEvent.INITIALIZE, ResourceAgentState.INACTIVE)
        self.agent_state_transition(agent_client, ResourceAgentEvent.GO_ACTIVE, ResourceAgentState.IDLE)
        self.agent_state_transition(agent_client, ResourceAgentEvent.RUN, ResourceAgentState.COMMAND)

        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(parsed_dp_id)

        for i in xrange(10):
            monitor = DatasetMonitor(dataset_id=dataset_id)
            agent_client.execute_resource(AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE))
            if not monitor.wait():
                raise AssertionError('Failed on the %ith granule' % i)
            monitor.stop()

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(dataset_id))
        self.assertEquals(len(rdt), 10)
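
Note that the two variants of test_instrument_simple above differ only in monitor handling: the first reuses a single DatasetMonitor and calls reset() between samples, while the second creates a fresh monitor per iteration and calls stop() once each granule arrives.
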
    def test_add_parameter_function(self):
        # req-tag: NEW SA - 31
        # Make a CTDBP Data Product
        data_product_id = self.make_ctd_data_product()
        self.data_product_id = data_product_id
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        # Throw some data in it
        rdt = self.ph.rdt_for_data_product(data_product_id)
        rdt['time'] = np.arange(30)
        rdt['temp'] = np.arange(30)
        rdt['pressure'] = np.arange(30)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())
        dataset_monitor.event.clear()

        #--------------------------------------------------------------------------------
        # This is what the user defines either via preload or through the UI
        #--------------------------------------------------------------------------------
        # Where the egg is
        egg_url = self.egg_url

        # Make a parameter function
        owner = 'ion_example.add_arrays'
        func = 'add_arrays'
        arglist = ['a', 'b']
        pf = ParameterFunction(name='add_arrays', function_type=PFT.PYTHON, owner=owner, function=func, args=arglist, egg_uri=egg_url)
        pfunc_id = self.dataset_management.create_parameter_function(pf)
        #--------------------------------------------------------------------------------
        self.addCleanup(self.dataset_management.delete_parameter_function, pfunc_id)

        # Make a data process definition
        dpd = DataProcessDefinition(name='add_arrays', description='Sums two arrays')
        dpd_id = self.data_process_management.create_data_process_definition(dpd, pfunc_id)

        # TODO: assert assoc exists
        argmap = {'a':'temp', 'b':'pressure'}
        dp_id = self.data_process_management.create_data_process(dpd_id, [data_product_id], argument_map=argmap, out_param_name='array_sum')

        # Verify that the function worked!
        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['array_sum'], np.arange(0,60,2))
    
        # Verify that we can inspect it as well
        source_code = self.data_process_management.inspect_data_process_definition(dpd_id)
        self.assertEquals(source_code, 'def add_arrays(a, b):\n    return a+b\n')

        url = self.data_process_management.get_data_process_definition_url(dpd_id) 
        self.assertEquals(url, 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg')

        dpd_ids, _ = self.resource_registry.find_resources(name='dataqc_spiketest', restype=RT.DataProcessDefinition, id_only=True)
        dpd_id = dpd_ids[0]
        url = self.data_process_management.get_data_process_definition_url(dpd_id) 
        self.assertEquals(url, 'https://github.com/ooici/ion-functions/blob/master/ion_functions/qc/qc_functions.py')
Example #9
    def create_device_site_deployment(self, dep_name="Deployment", starting=''):
        from interface.objects import StreamConfiguration, StreamConfigurationType, InstrumentDevice
        from interface.objects import InstrumentModel, PlatformAgent, InstrumentSite, TemporalBounds, Deployment
        from interface.objects import RemotePlatformDeploymentContext

        stream_conf     = StreamConfiguration(stream_name="CTD 1 Parsed Stream", parameter_dictionary_name='ctd_parsed_param_dict',  stream_type=StreamConfigurationType.PARSED)
        pdict_id        = self.dataset_management.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict')
        stream_def_id   = self.create_stream_definition(name='CTD 1', parameter_dictionary_id=pdict_id)
        data_product_id = self.create_data_product(name="DDP_1", stream_def_id=stream_def_id, stream_configuration=stream_conf)
        self.activate_data_product(data_product_id)

        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        stream_def = self.resource_registry.find_objects(data_product_id, PRED.hasStreamDefinition)[0][0]
        param_dict = self.resource_registry.find_objects(stream_def._id,  PRED.hasParameterDictionary)[0][0]
        # Add data to the DataProduct
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        rdt  = self.ph.get_rdt(stream_def._id)
        rdt_ = self.ph.rdt_for_data_product(data_product_id)
        self.assertEquals(rdt.fields, rdt_.fields)
        rdt['time'] = [0,   1,  2,  3]
        rdt['temp'] = [10, 11, 12, 13]
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())

        # Create Device
        device = InstrumentDevice(name='Device 1')
        device_id = self.instrument_management.create_instrument_device(device)
        self.data_acquisition_management.register_instrument(device_id)
        self.data_acquisition_management.assign_data_product(device_id, data_product_id)

        # Create Model
        model = InstrumentModel(name='Model 1')
        model_id = self.instrument_management.create_instrument_model(model)
        self.instrument_management.assign_instrument_model_to_instrument_device(model_id, device_id)

        # Create AgentDefinition
        ad       = PlatformAgent(stream_configurations=[stream_conf])
        ad_id, _ = self.resource_registry.create(ad)

        # Create Site
        site = InstrumentSite(name='Site 1', stream_configurations=[stream_conf])
        site_id, _ = self.resource_registry.create(site)
        self.resource_registry.create_association(site_id, PRED.hasModel,           model_id)
        self.resource_registry.create_association(site_id, PRED.hasAgentDefinition, ad_id)

        # TemporalBounds of the Deployment
        temp_bounds = TemporalBounds(start_datetime=starting, end_datetime='')
        # Create Deployment
        deployment     = Deployment(name=dep_name, type="RemotePlatform", context=RemotePlatformDeploymentContext(),
                                    constraint_list=[temp_bounds])
        deployment_id  = self.observatory_management.create_deployment(deployment=deployment, site_id=site_id, device_id=device_id)

        return site_id, device_id, dataset_id, deployment_id, param_dict, data_product_id
    def push_granule(self, data_product_id):
        '''
        Publishes and monitors that the granule arrived
        '''
        datasets, _ = self.rrclient.find_objects(data_product_id, PRED.hasDataset, id_only=True)
        dataset_monitor = DatasetMonitor(datasets[0])

        rdt = self.ph.rdt_for_data_product(data_product_id)
        self.ph.fill_parsed_rdt(rdt)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)

        assert dataset_monitor.wait()
        dataset_monitor.stop()
    def test_retrieve_process(self):
        data_product_id = self.make_ctd_data_product()
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        rdt = self.ph.rdt_for_data_product(data_product_id)
        date0 = datetime(2014, 1, 1, 0, 0) # 2014-01-01T00:00Z
        time0 = calendar.timegm(date0.timetuple()) + 2208988800 # NTP
        rdt['time'] = np.arange(time0, time0+30)
        rdt['temp'] = np.arange(30)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())
        dataset_monitor.event.clear()
        retrieve_process = RetrieveProcess(dataset_id)
        rdt = retrieve_process.retrieve(date0, date0 + timedelta(hours=1))
        np.testing.assert_array_equal(rdt['temp'], np.arange(30))
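
The constant 2208988800 that keeps appearing in these tests is the offset between the NTP epoch (1900-01-01) and the Unix epoch (1970-01-01). A self-contained check of the conversion used in test_retrieve_process:

    import calendar
    from datetime import datetime

    NTP_UNIX_DELTA = 2208988800  # seconds from 1900-01-01 (NTP epoch) to 1970-01-01 (Unix epoch)

    date0 = datetime(2014, 1, 1, 0, 0)          # 2014-01-01T00:00Z
    unix0 = calendar.timegm(date0.timetuple())  # Unix seconds, UTC
    ntp0 = unix0 + NTP_UNIX_DELTA               # NTP seconds, as stored in rdt['time']
    assert datetime.utcfromtimestamp(ntp0 - NTP_UNIX_DELTA) == date0
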
Example #16
    def check_presta_instrument_data_products(self, reference_designator):
        # Check the parsed data product to make sure it's got everything it needs and can be published, persisted, etc.

        # Absolute Pressure (SFLPRES_L0) is what comes off the instrument, SFLPRES_L1 is a pfunc
        # Let's go ahead and publish some fake data!!!
        # According to https://alfresco.oceanobservatories.org/alfresco/d/d/workspace/SpacesStore/63e16865-9d9e-4b11-b0b3-d5658faa5080/1341-00230_Data_Product_Spec_SFLPRES_OOI.pdf
        # Appendix A. Example 1.
        # p_psia_tide = 14.8670
        # the tide should be 10.2504
        passing = True

        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800.

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['absolute_pressure'] = [14.8670]
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'],
                                                  [10.2504], 4)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(
            dataset_monitor.wait())  # Bumped to 20 to keep buildbot happy
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)

        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'],
                                                  [10.2504], 4)
        passing &= self.assert_array_almost_equal(rdt['absolute_pressure'],
                                                  [14.8670], 4)

        return passing
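
The expected 10.2504 dbar follows from a plain psia-to-decibar unit conversion of the published 14.8670 psia; a worked check, assuming the SFLPRES_L1 parameter function is just that conversion (1 psi = 0.689475729 dbar):

    p_psia_tide = 14.8670
    seafloor_pressure = p_psia_tide * 0.689475729  # psia -> dbar (assumed L1 conversion)
    assert abs(round(seafloor_pressure, 4) - 10.2504) < 1e-9
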
Example #19
    def test_pydap(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)


        dp = DataProduct(name='example')

        data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt,10)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.wait())


        gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity

        pydap_host = CFG.get_safe('server.pydap.host','localhost')
        pydap_port = CFG.get_safe('server.pydap.port',8001)
        url = 'http://%s:%s/%s' %(pydap_host, pydap_port, data_product_id)

        for i in xrange(3): # Do it three times to test that the cache doesn't corrupt the requests/responses
            ds = open_url(url)

            np.testing.assert_array_equal(list(ds['data']['time']), np.arange(10))
            untested = []
            for k,v in rdt.iteritems():
                if k==rdt.temporal_parameter:
                    continue
                context = rdt.context(k)
                if isinstance(context.param_type, QuantityType):
                    np.testing.assert_array_equal(list(ds['data'][k]), rdt[k])
                elif isinstance(context.param_type, ArrayType):
                    if context.param_type.inner_encoding is None:
                        values = np.empty(rdt[k].shape, dtype='O')
                        for i,obj in enumerate(rdt[k]):
                            values[i] = str(obj)
                        np.testing.assert_array_equal(list(ds['data'][k]), values)
                    elif len(rdt[k].shape)>1:
                        values = np.empty(rdt[k].shape[0], dtype='O')
                        for i in xrange(rdt[k].shape[0]):
                            values[i] = ','.join(map(lambda x : str(x), rdt[k][i].tolist()))
                elif isinstance(context.param_type, ConstantType):
                    np.testing.assert_array_equal(list(ds['data'][k]), rdt[k])
                elif isinstance(context.param_type, CategoryType):
                    np.testing.assert_array_equal(list(ds['data'][k]), rdt[k].astype('|S'))
                else:
                    untested.append('%s (%s)' % (k,context.param_type))
            if untested:
                raise AssertionError('Untested parameters: %s' % untested)
Example #20
    def check_trhph_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing:
            return passing

        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(stream_def_id)
        passing &= self.assertEquals(pdict.name, 'trhph_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)

        # calibration constants
        a = 1.98e-9
        b = -2.45e-6
        c = 9.28e-4
        d = -0.0888
        e = 0.731

        V_s = 1.506
        V_c = 0.
        T = 11.8

        r1 = 0.906
        r2 = 4.095
        r3 = 4.095

        ORP_V = 1.806
        Cl = np.nan

        offset = 2008
        gain = 4.0
        # Normally this would be 50 per the DPS but the precision is %4.0f which truncates the values to the nearest 1...
        ORP = ((ORP_V * 1000.) - offset) / gain

        ntp_now = time.time() + 2208988800

        rdt['cc_a'] = [a]
        rdt['cc_b'] = [b]
        rdt['cc_c'] = [c]
        rdt['cc_d'] = [d]
        rdt['cc_e'] = [e]
        rdt['ref_temp_volts'] = [V_s]
        rdt['resistivity_temp_volts'] = [V_c]
        rdt['eh_sensor'] = [ORP_V]
        rdt['resistivity_5'] = [r1]
        rdt['resistivity_x1'] = [r2]
        rdt['resistivity_x5'] = [r3]
        rdt['cc_offset'] = [offset]
        rdt['cc_gain'] = [gain]
        rdt['time'] = [ntp_now]

        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'], [T], 2)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP], 4)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'], [T], 2)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP], 4)

        return passing
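
The Eh/ORP expectation above can be checked by hand from the calibration constants in the test:

    # Worked check of the ORP conversion in check_trhph_instrument_data_products:
    ORP_V = 1.806   # eh_sensor output, volts
    offset = 2008   # cc_offset
    gain = 4.0      # cc_gain
    ORP = ((ORP_V * 1000.) - offset) / gain
    assert abs(ORP - (-50.5)) < 1e-9  # millivolts
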
    def check_vel3d_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(stream_def_id)
        self.assertEquals(pdict.name, 'vel3d_b_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        lat = 14.6846
        lon = -51.044
        ts = np.array([3319563600, 3319567200, 3319570800, 3319574400,
            3319578000, 3319581600, 3319585200, 3319588800, 3319592400,
            3319596000], dtype=np.float)

        ve = np.array([ -3.2,  0.1,  0. ,  2.3, -0.1,  5.6,  5.1,  5.8,
            8.8, 10.3])

        vn = np.array([ 18.2,  9.9, 12. ,  6.6, 7.4,  3.4, -2.6,  0.2,
            -1.5,  4.1])
        vu = np.array([-1.1, -0.6, -1.4, -2, -1.7, -2, 1.3, -1.6, -1.1, -4.5])
        ve_expected = np.array([-0.085136, -0.028752, -0.036007, 0.002136,
            -0.023158, 0.043218, 0.056451, 0.054727, 0.088446, 0.085952])
        vn_expected = np.array([ 0.164012,  0.094738,  0.114471,  0.06986,  0.07029,
                    0.049237, -0.009499,  0.019311,  0.012096,  0.070017])
        vu_expected = np.array([-0.011, -0.006, -0.014, -0.02, -0.017, -0.02,
            0.013, -0.016, -0.011, -0.045])

        
        rdt['time'] = ts
        rdt['lat'] = [lat] * 10
        rdt['lon'] = [lon] * 10
        rdt['turbulent_velocity_east'] = ve
        rdt['turbulent_velocity_north'] = vn
        rdt['turbulent_velocity_up'] = vu

        passing &= self.assert_array_almost_equal(rdt['eastward_turbulent_velocity'],
                ve_expected)
        passing &= self.assert_array_almost_equal(rdt['northward_turbulent_velocity'],
                vn_expected)
        passing &= self.assert_array_almost_equal(rdt['upward_turbulent_velocity'],
                vu_expected)


        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['eastward_turbulent_velocity'],
                ve_expected)
        passing &= self.assert_array_almost_equal(rdt['northward_turbulent_velocity'],
                vn_expected)
        passing &= self.assert_array_almost_equal(rdt['upward_turbulent_velocity'],
                vu_expected)
        return passing
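
The expected vectors above are consistent with a cm/s-to-m/s scaling plus a rotation of the horizontal components by the local magnetic declination; back-solving the first sample gives a declination near -17.5 degrees at (14.6846, -51.044). A sketch with that back-solved, illustrative value:

    import numpy as np

    theta = np.deg2rad(-17.46)  # illustrative declination, back-solved from the data
    ve, vn, vu = -3.2 / 100., 18.2 / 100., -1.1 / 100.  # cm/s -> m/s
    ve_true = ve * np.cos(theta) + vn * np.sin(theta)
    vn_true = -ve * np.sin(theta) + vn * np.cos(theta)
    # ve_true ~ -0.0851, vn_true ~ 0.1640, vu is unchanged at -0.011
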
    def test_activateInstrumentSample(self):

        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s', instModel_id)

        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw')
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict')


        # Create InstrumentAgent
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri=DRV_URI_GOOD,
                                  stream_configurations = [raw_config, parsed_config])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) " , instDevice_id)


        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config,
                                            alerts= [])


        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj,
                                                                               instAgent_id,
                                                                               instDevice_id)


        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubcli.create_stream_definition(name='parsed', parameter_dictionary_id=parsed_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('raw', id_only=True)
        raw_stream_def_id = self.pubsubcli.create_stream_definition(name='raw', parameter_dictionary_id=raw_pdict_id)


        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s' , data_product_id1)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)



        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s' , dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]


        pid = self.create_logger('ctd_parsed', stream_ids[0] )
        self.loggerpids.append(pid)


        dp_obj = IonObject(RT.DataProduct,
            name='the raw data',
            description='raw stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        data_product_id2 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', data_product_id2)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)

        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)

        # setup notifications for the device and parsed data product
        user_id_1 = self._create_notification( user_name='user_1', instrument_id=instDevice_id, product_id=data_product_id1)
        #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users ---------
        user_id_2 = self._create_notification( user_name='user_2', instrument_id=instDevice_id, product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasStream, None, True)
        log.debug('Data product streams2 = %s' , str(stream_ids))

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id2, PRED.hasDataset, RT.Dataset, True)
        log.debug('Data set for data_product_id2 = %s' , dataset_ids[0])
        self.raw_dataset = dataset_ids[0]


        def start_instrument_agent():
            self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        gevent.joinall([gevent.spawn(start_instrument_agent)])


        #cleanup
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)


        #wait for start
        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     instDevice_id,
                                     ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        gate.process_id)

        #log.trace('Instrument agent instance obj: = %s' , str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(instDevice_id,
                                              to_name=gate.process_id,
                                              process=FakeProcess())

        log.debug("test_activateInstrumentSample: got ia client %s" , str(self._ia_client))

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: initialize %s" , str(retval))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.INACTIVE, state)

        log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrument: return value from go_active %s" , str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.IDLE, state)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("(L4-CI-SA-RQ-334): current state after sending go_active command %s" , str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: run %s" , str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.STOPPED, state)

        cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.IDLE, state)

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        for i in xrange(10):
            monitor = DatasetMonitor(dataset_id=self.parsed_dataset)
            self._ia_client.execute_resource(AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE))
            if not monitor.wait():
                raise AssertionError('Failed on the %ith granule' % i)
            monitor.stop()


#        cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE)
#        for i in xrange(10):
#            retval = self._ia_client.execute_resource(cmd)
#            log.debug("test_activateInstrumentSample: return from sample %s" , str(retval))

        log.debug( "test_activateInstrumentSample: calling reset ")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset %s" , str(reply))


        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to start_retrieve
        #--------------------------------------------------------------------------------

        replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset)
        self.assertIsInstance(replay_data_raw, Granule)
        rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw)
        log.debug("RDT raw: %s", str(rdt_raw.pretty_print()) )

        self.assertIn('raw', rdt_raw)
        raw_vals = rdt_raw['raw']

        all_raw = "".join(raw_vals)

        # look for 't' entered after a prompt -- ">t"
        t_commands = all_raw.count(">t")

        if 10 != t_commands:
            log.error("%s raw_vals: ", len(raw_vals))
            for i, r in enumerate(raw_vals): log.error("raw val %s: %s", i, [r])
            self.fail("Expected 10 't' strings in raw_vals, got %s" % t_commands)
        else:
            log.debug("%s raw_vals: ", len(raw_vals))
            for i, r in enumerate(raw_vals): log.debug("raw val %s: %s", i, [r])

        replay_data_parsed = self.dataretrieverclient.retrieve(self.parsed_dataset)
        self.assertIsInstance(replay_data_parsed, Granule)
        rdt_parsed = RecordDictionaryTool.load_from_granule(replay_data_parsed)
        log.debug("test_activateInstrumentSample: RDT parsed: %s", str(rdt_parsed.pretty_print()) )
        self.assertIn('temp', rdt_parsed)
        temp_vals = rdt_parsed['temp']
        pressure_vals  = rdt_parsed['pressure']
        if 10 != len(temp_vals):
            log.error("%s temp_vals: %s", len(temp_vals), temp_vals)
            self.fail("Expected 10 temp_vals, got %s" % len(temp_vals))


        log.debug("l4-ci-sa-rq-138")
        """
        Physical resource control shall be subject to policy

        Instrument management control capabilities shall be subject to policy

        The actor accessing the control capabilities must be authorized to send commands.

        note from maurice 2012-05-18: Talk to tim M to verify that this is policy.  If it is then talk with Stephen to
                                      get an example of a policy test and use that to create a test stub that will be
                                      completed when we have instrument policies.

        Tim M: The "actor", aka observatory operator, will access the instrument through ION.

        """


        #--------------------------------------------------------------------------------
        # Get the extended data product to see if it contains the granules
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(data_product_id=data_product_id1, user_id=user_id_1)
        def poller(extended_product):
            return len(extended_product.computed.user_notification_requests.value) == 1

        poll(poller, extended_product, timeout=30)

        self._check_computed_attributes_of_extended_product( expected_data_product_id = data_product_id1, extended_data_product = extended_product)


        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(instrument_device_id=instDevice_id, user_id=user_id_1)

        #--------------------------------------------------------------------------------
        # For the second user, check the extended data product and the extended instrument
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(data_product_id=data_product_id2, user_id=user_id_2)
        self._check_computed_attributes_of_extended_product(expected_data_product_id = data_product_id2, extended_data_product = extended_product)


        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_2)
        self._check_computed_attributes_of_extended_instrument(
            expected_instrument_device_id=instDevice_id,
            extended_instrument=extended_instrument)

        #--------------------------------------------------------------------------------
        # Deactivate loggers
        #--------------------------------------------------------------------------------

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

        self.dpclient.delete_data_product(data_product_id1)
        self.dpclient.delete_data_product(data_product_id2)
Example No. 24
class TestCoverageQC(TestQCFunctions):

    def init_check(self):
        self.dp_id = self.create_data_product(name=uuid4().hex, stream_def_id=self.stream_def_id)
        self.data_product_management.activate_data_product_persistence(self.dp_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, self.dp_id)
        self.dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(self.dp_id)
        self.dataset_monitor = DatasetMonitor(self.dataset_id)
        self.addCleanup(self.dataset_monitor.stop)

    def test_qc_functions(self):
        self.check_fill_values()
        self.check_global_range()
        self.check_spike()
        self.check_stuck_value()
        self.check_gradient()
        self.check_trend()
        self.check_localrange()

    def check_global_range(self):
        TestQCFunctions.check_global_range(self)
        self.init_check()

        flagged = Event()
        def cb(event, *args, **kwargs):
            times = event.temporal_values
            self.assertEquals(times, [0.0, 7.0])
            flagged.set()

        event_subscriber = EventSubscriber(event_type=OT.ParameterQCEvent, origin=self.dp_id, callback=cb, auto_delete=True)
        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
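        # the first and last samples (t=0.0 and t=7.0) fall outside the global
        # range, matching the event times asserted in the callback above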
        np.testing.assert_array_almost_equal(rdt['tempwat_glblrng_qc'], [0, 1, 1, 1, 1, 1, 1, 0])
        self.assertTrue(flagged.wait(10))

    def check_fill_values(self):
        log.info('check_fill_values')
        self.new_rdt()
        self.init_check()
        self.rdt['time'] = np.arange(5)
        self.rdt['temp'] = [12] * 5
        self.rdt.fetch_lookup_values()
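        # no QC reference values are loaded yet, so every QC output should still
        # hold the -99 fill value, both locally and after the replay below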

        np.testing.assert_array_equal(self.rdt['tempwat_glblrng_qc'], [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_spketst_qc'], [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_stuckvl_qc'], [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_trndtst_qc'], [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_gradtst_qc'], [-99] * 5)
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)

        self.assertTrue(self.dataset_monitor.wait())
        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_equal(rdt['tempwat_glblrng_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_spketst_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_stuckvl_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_trndtst_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_gradtst_qc'], [-99] * 5)

    def check_spike(self):
        log.info('check_spike')
        TestQCFunctions.check_spike(self)
        self.init_check()
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_spketst_qc'], [1, 1, 0, 1, 1, 1, 1, 1])


    def check_stuck_value(self):
        log.info('check_stuck_value')
        TestQCFunctions.check_stuck_value(self)
        self.init_check()
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_stuckvl_qc'], [1, 1, 0, 0, 0, 0, 1, 1, 1, 1])
    
    def check_gradient(self):
        log.info('check_gradient')
        TestQCFunctions.check_gradient(self)
        self.init_check()
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_equal(rdt['tempwat_gradtst_qc'], [1, 1, 0, 0, 1])

    def check_trend(self):
        log.info('check_trend')
        TestQCFunctions.check_trend(self)
        self.init_check()
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_trndtst_qc'], [1] * 10)

    def check_localrange(self):
        log.info('check_localrange')
        TestQCFunctions.check_localrange(self)
        self.init_check()

        flagged = Event()
        def cb(event, *args, **kwargs):
            times = event.temporal_values
            if event.qc_parameter != 'tempwat_loclrng_qc':
                return
            np.testing.assert_array_equal(times, np.array([
                3580144708.7555027, 3580144709.7555027, 3580144710.7555027,
                3580144711.7555027, 3580144712.7555027]))
            flagged.set()

        event_subscriber = EventSubscriber(event_type=OT.ParameterQCEvent, origin=self.dp_id, callback=cb, auto_delete=True)
        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())
        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_loclrng_qc'], [1, 1, 1, 1, 1, 0, 0, 0, 0, 0])
        self.assertTrue(flagged.wait(10))
Example No. 25
    def test_pydap(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition(
            'example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        stream_def_id)

        dp = DataProduct(name='example')

        data_product_id = self.data_product_management.create_data_product(
            dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product,
                        data_product_id)

        self.data_product_management.activate_data_product_persistence(
            data_product_id)
        self.addCleanup(
            self.data_product_management.suspend_data_product_persistence,
            data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id,
                                                         PRED.hasDataset,
                                                         id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt, 10)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.wait())

        gevent.sleep(1)  # Yield to other greenlets, had an issue with connectivity

        pydap_host = CFG.get_safe('server.pydap.host', 'localhost')
        pydap_port = CFG.get_safe('server.pydap.port', 8001)
        url = 'http://%s:%s/%s' % (pydap_host, pydap_port, data_product_id)

        # Do it three times to test that the cache doesn't corrupt the requests/responses
        for i in xrange(3):
            ds = open_url(url)

            np.testing.assert_array_equal(list(ds['data']['time']),
                                          np.arange(10))
            untested = []
            for k, v in rdt.iteritems():
                if k == rdt.temporal_parameter:
                    continue
                context = rdt.context(k)
                if isinstance(context.param_type, QuantityType):
                    np.testing.assert_array_equal(list(ds['data'][k]), rdt[k])
                elif isinstance(context.param_type, ArrayType):
                    if context.param_type.inner_encoding is None:
                        values = np.empty(rdt[k].shape, dtype='O')
                        # use a separate index so the outer request loop's `i` isn't shadowed
                        for j, obj in enumerate(rdt[k]):
                            values[j] = str(obj)
                        np.testing.assert_array_equal(list(ds['data'][k]), values)
                    elif len(rdt[k].shape) > 1:
                        values = np.empty(rdt[k].shape[0], dtype='O')
                        for j in xrange(rdt[k].shape[0]):
                            values[j] = ','.join(map(str, rdt[k][j].tolist()))
                        # these joined rows were built but never checked; compare them as well
                        np.testing.assert_array_equal(list(ds['data'][k]), values)
                elif isinstance(context.param_type, ConstantType):
                    np.testing.assert_array_equal(list(ds['data'][k]), rdt[k])
                elif isinstance(context.param_type, CategoryType):
                    np.testing.assert_array_equal(list(ds['data'][k]),
                                                  rdt[k].astype('|S'))
                else:
                    untested.append('%s (%s)' % (k, context.param_type))
            if untested:
                raise AssertionError('Untested parameters: %s' % untested)
Example No. 27
    def check_trhph_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing

        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(
            stream_def_id)
        passing &= self.assertEquals(pdict.name, 'trhph_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)

        # calibration constants
        a = 1.98e-9
        b = -2.45e-6
        c = 9.28e-4
        d = -0.0888
        e = 0.731

        V_s = 1.506
        V_c = 0.
        T = 11.8

        r1 = 0.906
        r2 = 4.095
        r3 = 4.095

        ORP_V = 1.806
        Cl = np.nan

        offset = 2008
        gain = 4.0
        # Normally this would be 50 per the DPS but the precision is %4.0f which truncates the values to the nearest 1...
        ORP = ((ORP_V * 1000.) - offset) / gain
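        # worked out: ((1.806 * 1000.) - 2008) / 4.0 = -50.5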

        ntp_now = time.time() + 2208988800
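        # 2208988800 s = offset between the NTP epoch (1900) and the Unix epoch (1970)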

        rdt['cc_a'] = [a]
        rdt['cc_b'] = [b]
        rdt['cc_c'] = [c]
        rdt['cc_d'] = [d]
        rdt['cc_e'] = [e]
        rdt['ref_temp_volts'] = [V_s]
        rdt['resistivity_temp_volts'] = [V_c]
        rdt['eh_sensor'] = [ORP_V]
        rdt['resistivity_5'] = [r1]
        rdt['resistivity_x1'] = [r2]
        rdt['resistivity_x5'] = [r3]
        rdt['cc_offset'] = [offset]
        rdt['cc_gain'] = [gain]
        rdt['time'] = [ntp_now]

        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'],
                                                  [T], 2)
        passing &= self.assert_array_almost_equal(
            rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP],
                                                  4)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)

        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'],
                                                  [T], 2)
        passing &= self.assert_array_almost_equal(
            rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP],
                                                  4)

        return passing
Example No. 28
    def check_vel3d_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(
            stream_def_id)
        self.assertEquals(pdict.name, 'vel3d_b_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        lat = 14.6846
        lon = -51.044
        ts = np.array([3319563600, 3319567200, 3319570800, 3319574400, 3319578000,
                       3319581600, 3319585200, 3319588800, 3319592400, 3319596000],
                      dtype=np.float)

        ve = np.array([-3.2, 0.1, 0., 2.3, -0.1, 5.6, 5.1, 5.8, 8.8, 10.3])

        vn = np.array([18.2, 9.9, 12., 6.6, 7.4, 3.4, -2.6, 0.2, -1.5, 4.1])
        vu = np.array([-1.1, -0.6, -1.4, -2, -1.7, -2, 1.3, -1.6, -1.1, -4.5])
        ve_expected = np.array([
            -0.085136, -0.028752, -0.036007, 0.002136, -0.023158, 0.043218,
            0.056451, 0.054727, 0.088446, 0.085952
        ])
        vn_expected = np.array([
            0.164012, 0.094738, 0.114471, 0.06986, 0.07029, 0.049237,
            -0.009499, 0.019311, 0.012096, 0.070017
        ])
        vu_expected = np.array([
            -0.011, -0.006, -0.014, -0.02, -0.017, -0.02, 0.013, -0.016,
            -0.011, -0.045
        ])
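        # expected values: inputs are cm/s and outputs m/s, so the 'up' component
        # is a straight 1/100 scaling; east/north are additionally rotated,
        # apparently to correct for magnetic declination at this lat/lon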

        rdt['time'] = ts
        rdt['lat'] = [lat] * 10
        rdt['lon'] = [lon] * 10
        rdt['turbulent_velocity_east'] = ve
        rdt['turbulent_velocity_north'] = vn
        rdt['turbulent_velocity_up'] = vu

        passing &= self.assert_array_almost_equal(
            rdt['eastward_turbulent_velocity'], ve_expected)
        passing &= self.assert_array_almost_equal(
            rdt['northward_turbulent_velocity'], vn_expected)
        passing &= self.assert_array_almost_equal(
            rdt['upward_turbulent_velocity'], vu_expected)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(
            rdt['eastward_turbulent_velocity'], ve_expected)
        passing &= self.assert_array_almost_equal(
            rdt['northward_turbulent_velocity'], vn_expected)
        passing &= self.assert_array_almost_equal(
            rdt['upward_turbulent_velocity'], vu_expected)
        return passing
    def create_device_site_deployment(self,
                                      dep_name="Deployment",
                                      starting=''):
        from interface.objects import StreamConfiguration, StreamConfigurationType, InstrumentDevice
        from interface.objects import InstrumentModel, PlatformAgent, InstrumentSite, TemporalBounds, Deployment
        from interface.objects import RemotePlatformDeploymentContext

        stream_conf = StreamConfiguration(
            stream_name="CTD 1 Parsed Stream",
            parameter_dictionary_name='ctd_parsed_param_dict',
            stream_type=StreamConfigurationType.PARSED)
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            name='ctd_parsed_param_dict')
        stream_def_id = self.create_stream_definition(
            name='CTD 1', parameter_dictionary_id=pdict_id)
        data_product_id = self.create_data_product(
            name="DDP_1",
            stream_def_id=stream_def_id,
            stream_configuration=stream_conf)
        self.activate_data_product(data_product_id)

        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(
            data_product_id)
        stream_def = self.resource_registry.find_objects(
            data_product_id, PRED.hasStreamDefinition)[0][0]
        param_dict = self.resource_registry.find_objects(
            stream_def._id, PRED.hasParameterDictionary)[0][0]
        # Add data to the DataProduct
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        rdt = self.ph.get_rdt(stream_def._id)
        rdt_ = self.ph.rdt_for_data_product(data_product_id)
        self.assertEquals(rdt.fields, rdt_.fields)
        rdt['time'] = [0, 1, 2, 3]
        rdt['temp'] = [10, 11, 12, 13]
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())

        # Create Device
        device = InstrumentDevice(name='Device 1')
        device_id = self.instrument_management.create_instrument_device(device)
        self.data_acquisition_management.register_instrument(device_id)
        self.data_acquisition_management.assign_data_product(
            device_id, data_product_id)

        # Create Model
        model = InstrumentModel(name='Model 1')
        model_id = self.instrument_management.create_instrument_model(model)
        self.instrument_management.assign_instrument_model_to_instrument_device(
            model_id, device_id)

        # Create AgentDefinition
        ad = PlatformAgent(stream_configurations=[stream_conf])
        ad_id, _ = self.resource_registry.create(ad)

        # Create Site
        site = InstrumentSite(name='Site 1',
                              stream_configurations=[stream_conf])
        site_id, _ = self.resource_registry.create(site)
        self.resource_registry.create_association(site_id, PRED.hasModel,
                                                  model_id)
        self.resource_registry.create_association(site_id,
                                                  PRED.hasAgentDefinition,
                                                  ad_id)

        # TemporalBounds of the Deployment
        temp_bounds = TemporalBounds(start_datetime=starting, end_datetime='')
        # Create Deployment
        deployment = Deployment(name=dep_name,
                                type="RemotePlatform",
                                context=RemotePlatformDeploymentContext(),
                                constraint_list=[temp_bounds])
        deployment_id = self.observatory_management.create_deployment(
            deployment=deployment, site_id=site_id, device_id=device_id)

        return site_id, device_id, dataset_id, deployment_id, param_dict, data_product_id
    def test_lookup_values(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_lookups()
        stream_def_id = self.pubsubcli.create_stream_definition('lookup', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition, stream_def_id)

        data_product = DataProduct(name='lookup data product')

        data_product_id = self.dpsc_cli.create_data_product(data_product, stream_definition_id=stream_def_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, data_product_id)
        data_producer = DataProducer(name='producer')
        data_producer.producer_context = DataProcessProducerContext()
        data_producer.producer_context.configuration['qc_keys'] = ['offset_document']
        data_producer_id, _ = self.rrclient.create(data_producer)
        self.addCleanup(self.rrclient.delete, data_producer_id)
        assoc, _ = self.rrclient.create_association(subject=data_product_id, object=data_producer_id, predicate=PRED.hasDataProducer)
        self.addCleanup(self.rrclient.delete_association, assoc)

        document_keys = self.damsclient.list_qc_references(data_product_id)

        self.assertEquals(document_keys, ['offset_document'])
        svm = StoredValueManager(self.container)
        svm.stored_value_cas('offset_document', {'offset_a':2.0})
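        # 'calibrated' applies offset_a to temp, so 20.0 + 2.0 -> 22.0 is expected below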
        self.dpsc_cli.activate_data_product_persistence(data_product_id)
        dataset_ids, _ = self.rrclient.find_objects(subject=data_product_id, predicate=PRED.hasDataset, id_only=True)
        dataset_id = dataset_ids[0]

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [0]
        rdt['temp'] = [20.]
        granule = rdt.to_granule()

        stream_ids, _ = self.rrclient.find_objects(subject=data_product_id, predicate=PRED.hasStream, id_only=True)
        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish(granule)

        self.assertTrue(dataset_monitor.wait())

        granule = self.data_retriever.retrieve(dataset_id)
        rdt2 = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['temp'], rdt2['temp'])
        np.testing.assert_array_almost_equal(rdt2['calibrated'], np.array([22.0]))


        svm.stored_value_cas('updated_document', {'offset_a':3.0})
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ep = EventPublisher(event_type=OT.ExternalReferencesUpdatedEvent)
        ep.publish_event(origin=data_product_id, reference_keys=['updated_document'])

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [1]
        rdt['temp'] = [20.]
        granule = rdt.to_granule()
        gevent.sleep(2) # Yield so that the event goes through
        publisher.publish(granule)
        self.assertTrue(dataset_monitor.wait())

        granule = self.data_retriever.retrieve(dataset_id)
        rdt2 = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt2['temp'], np.array([20., 20.]))
        np.testing.assert_array_almost_equal(rdt2['calibrated'], np.array([22.0, 23.0]))