Example #1
    def test_replay_with_parameters(self):
        # --------------------------------------------------------------------------------
        # Create the configurations and the dataset
        # --------------------------------------------------------------------------------
        stream_id = self.pubsub_management.create_stream()
        config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset()
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id
        )

        # --------------------------------------------------------------------------------
        # Coerce the datastore into existence (beats race condition)
        # --------------------------------------------------------------------------------
        self.get_datastore(dataset_id)

        self.launch_producer(stream_id)

        self.wait_until_we_have_enough_granules(dataset_id, 4)

        query = {"start_time": 0, "end_time": 20, "parameters": ["time", "temp"]}
        retrieved_data = self.data_retriever.retrieve(dataset_id=dataset_id, query=query)

        rdt = RecordDictionaryTool.load_from_granule(retrieved_data)
        comp = np.arange(20) == rdt["time"]
        self.assertTrue(comp.all(), "%s" % rdt.pretty_print())
        self.assertEquals(set(rdt.iterkeys()), set(["time", "temp"]))
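
The retrieve-then-unpack sequence above recurs in nearly every example below. A minimal helper capturing the pattern might look like this sketch (the name retrieve_rdt is hypothetical; it assumes the same data_retriever client and RecordDictionaryTool import the examples already use):

    def retrieve_rdt(data_retriever, dataset_id, query=None):
        # Replay the dataset, optionally filtered by a query dict with
        # 'start_time', 'end_time', and 'parameters', and unpack the granule.
        granule = data_retriever.retrieve(dataset_id=dataset_id, query=query)
        return RecordDictionaryTool.load_from_granule(granule)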
Example #2
    def test_stuck_value_test(self):
        TestQCFunctions.test_stuck_value_test(self)
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.dataset_monitor.event.wait(10)

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_stuckvl_qc'], [1, 1, 0, 0, 0, 0, 1, 1, 1, 1])
Example #3
    def test_replay_by_time(self):
        log.info("starting test...")

        # --------------------------------------------------------------------------------
        # Create the necessary configurations for the test
        # --------------------------------------------------------------------------------
        stream_id = self.pubsub_management.create_stream()
        config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset()
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id
        )
        # --------------------------------------------------------------------------------
        # Create the datastore first
        # --------------------------------------------------------------------------------
        # There is sometimes a race condition between the services and the process over
        # the creation of the datastore and its instance; this ensures the datastore
        # exists before the process even subscribes to data.
        self.get_datastore(dataset_id)

        self.publish_fake_data(stream_id)
        self.wait_until_we_have_enough_granules(dataset_id, 2)  # I just need two

        replay_granule = self.data_retriever.retrieve(dataset_id, {"start_time": 0, "end_time": 6})

        rdt = RecordDictionaryTool.load_from_granule(replay_granule)

        comp = rdt["time"] == np.array([0, 1, 2, 3, 4, 5])

        try:
            log.info("Compared granule: %s", replay_granule.__dict__)
            log.info("Granule tax: %s", replay_granule.taxonomy.__dict__)
        except Exception:  # best-effort debug logging only
            pass
        self.assertTrue(comp.all())
Example #4
    def test_ingestion_pause(self):
        ctd_stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        ingestion_config_id = self.get_ingestion_config()
        self.start_ingestion(ctd_stream_id, dataset_id)
        self.addCleanup(self.stop_ingestion, ctd_stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10)

        publisher = StandaloneStreamPublisher(ctd_stream_id, route)
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)
        publisher.publish(rdt.to_granule())
        self.assertTrue(monitor.wait())
        granule = self.data_retriever.retrieve(dataset_id)


        self.ingestion_management.pause_data_stream(ctd_stream_id, ingestion_config_id)

        monitor.event.clear()
        rdt['time'] = np.arange(10,20)
        publisher.publish(rdt.to_granule())
        self.assertFalse(monitor.event.wait(1))

        self.ingestion_management.resume_data_stream(ctd_stream_id, ingestion_config_id)

        self.assertTrue(monitor.wait())

        granule = self.data_retriever.retrieve(dataset_id)
        rdt2 = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_almost_equal(rdt2['time'], np.arange(20))
Example #5
    def _extract_granule_data(self, granules):
        """
        Pull all data out of all granules and return a list of per-record value dicts
        """
        result = []
        for granule in granules:
            group = []
            log.debug("Granule: %s", granule)
            rdt = RecordDictionaryTool.load_from_granule(granule)

            # Store the data from each record
            for key, value in rdt.iteritems():
                for i in range(0, len(value)):
                    if len(group) <= i:
                        group.append({})
                    group[i][key] = value[i]

                    # Store the connection information for each record
                    if 'connection_index' not in group[i]:
                        group[i]['connection_index'] = granule.connection_index

                    if 'connection_id' not in group[i]:
                        group[i]['connection_id'] = granule.connection_id

            result += group

        log.debug("extracted granules: %s", pprint.pformat(result))

        return result
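
For illustration, a hypothetical call over two single-record granules flattens them into one list of per-record dicts:

    records = self._extract_granule_data([granule_a, granule_b])
    # e.g. [{'time': 0.0, 'temp': 12.0, 'connection_id': '...', 'connection_index': '0'},
    #       {'time': 1.0, 'temp': 13.0, 'connection_id': '...', 'connection_index': '1'}]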
Example #6
    def test_repersist_data(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        stream_def_id = self.pubsub_management.create_stream_definition(name='ctd', parameter_dictionary_id=pdict_id)
        stream_id, route = self.pubsub_management.create_stream(name='repersist', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id)
        self.get_datastore(dataset_id)
        self.publish_hifi(stream_id,route,0)
        self.publish_hifi(stream_id,route,1)
        self.wait_until_we_have_enough_granules(dataset_id,2)
        self.ingestion_management.unpersist_data_stream(stream_id=stream_id,ingestion_configuration_id=config_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id,ingestion_configuration_id=config_id,dataset_id=dataset_id)
        self.publish_hifi(stream_id,route,2)
        self.publish_hifi(stream_id,route,3)
        self.wait_until_we_have_enough_granules(dataset_id,4)
        success = False
        with gevent.timeout.Timeout(5):
            while not success:

                replay_granule = self.data_retriever.retrieve(dataset_id)

                rdt = RecordDictionaryTool.load_from_granule(replay_granule)

                comp = rdt['time'] == np.arange(0,40)
                if not isinstance(comp,bool):
                    success = comp.all()
                gevent.sleep(1)

        self.assertTrue(success)
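
The busy-wait above (repeated in Example #10) can be factored into a small polling helper; a sketch under the same gevent assumption, with a hypothetical name:

    def poll_until(predicate, timeout=5, interval=1):
        # Re-evaluate predicate until it returns True; gevent.Timeout
        # propagates out of the block if the deadline passes first.
        with gevent.timeout.Timeout(timeout):
            while not predicate():
                gevent.sleep(interval)
        return True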
Example #7
    def test_last_granule(self):
        # --------------------------------------------------------------------------------
        # Create the necessary configurations for the test
        # --------------------------------------------------------------------------------
        stream_id = self.pubsub_management.create_stream()
        config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset()
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id
        )
        # --------------------------------------------------------------------------------
        # Create the datastore first
        # --------------------------------------------------------------------------------
        self.get_datastore(dataset_id)

        self.publish_hifi(stream_id, 0)
        self.publish_hifi(stream_id, 1)

        self.wait_until_we_have_enough_granules(dataset_id, 2)  # I just need two

        replay_granule = self.data_retriever.retrieve_last_granule(dataset_id)

        rdt = RecordDictionaryTool.load_from_granule(replay_granule)

        comp = rdt["time"] == np.arange(10) + 10

        self.assertTrue(comp.all())
Example #8
 def validate_granule_subscription(self, msg, route, stream_id):
     if msg == {}:
         return
     rdt = RecordDictionaryTool.load_from_granule(msg)
     log.info('%s', rdt.pretty_print())
     self.assertIsInstance(msg,Granule,'Message is improperly formatted. (%s)' % type(msg))
     self.event.set()
Example #9
    def test_replay_with_parameters(self):
        # --------------------------------------------------------------------------------
        # Create the configurations and the dataset
        # --------------------------------------------------------------------------------
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext("binary", param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context("binary", bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext("records", param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context("records", rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary(
            "replay_pdict", parameter_context_ids=context_ids, temporal_context="time"
        )

        stream_def_id = self.pubsub_management.create_stream_definition(
            "replay_stream", parameter_dictionary_id=pdict_id
        )

        stream_id, route = self.pubsub_management.create_stream(
            "replay_with_params", exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id
        )
        config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id
        )

        # --------------------------------------------------------------------------------
        # Coerce the datastore into existence (beats race condition)
        # --------------------------------------------------------------------------------
        self.get_datastore(dataset_id)

        self.launch_producer(stream_id)

        self.wait_until_we_have_enough_granules(dataset_id, 40)

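        # 2208988800 s is the offset between the NTP (1900) and Unix (1970) epochs;
        # the subtraction presumably cancels an epoch conversion in the retrieval
        # path so the raw test times 0..20 line up.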
        query = {
            "start_time": 0 - 2208988800,
            "end_time": 20 - 2208988800,
            "stride_time": 2,
            "parameters": ["time", "temp"],
        }
        retrieved_data = self.data_retriever.retrieve(dataset_id=dataset_id, query=query)

        rdt = RecordDictionaryTool.load_from_granule(retrieved_data)
        comp = np.arange(0, 20, 2) == rdt["time"]
        self.assertTrue(comp.all(), "%s" % rdt.pretty_print())
        self.assertEquals(set(rdt.iterkeys()), set(["time", "temp"]))

        extents = self.dataset_management.dataset_extents(dataset_id=dataset_id, parameters=["time", "temp"])
        self.assertTrue(extents["time"] >= 20)
        self.assertTrue(extents["temp"] >= 20)

        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)
Example #10
    def test_repersist_data(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.publish_hifi(stream_id,route,0)
        self.publish_hifi(stream_id,route,1)
        self.wait_until_we_have_enough_granules(dataset_id,20)
        config_id = self.get_ingestion_config()
        self.ingestion_management.unpersist_data_stream(stream_id=stream_id,ingestion_configuration_id=config_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id,ingestion_configuration_id=config_id,dataset_id=dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)
        self.publish_hifi(stream_id,route,2)
        self.publish_hifi(stream_id,route,3)
        self.wait_until_we_have_enough_granules(dataset_id,40)
        success = False
        with gevent.timeout.Timeout(5):
            while not success:

                replay_granule = self.data_retriever.retrieve(dataset_id)

                rdt = RecordDictionaryTool.load_from_granule(replay_granule)

                comp = rdt['time'] == np.arange(0,40)
                if not isinstance(comp,bool):
                    success = comp.all()
                gevent.sleep(1)

        self.assertTrue(success)
Example #11
    def check_fill_values(self):
        log.info('check_fill_values')
        self.new_rdt()
        self.init_check()
        self.rdt['time'] = np.arange(5)
        self.rdt['temp'] = [12] * 5
        self.rdt.fetch_lookup_values()
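        # With no QC lookup/reference values available, every QC flag should come
        # back as the fill value (-99 in these tests).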

        np.testing.assert_array_equal(self.rdt['tempwat_glblrng_qc'],
                                      [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_spketst_qc'],
                                      [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_stuckvl_qc'],
                                      [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_trndtst_qc'],
                                      [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_gradtst_qc'],
                                      [-99] * 5)
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)

        self.dataset_monitor.event.wait(10)
        rdt = RecordDictionaryTool.load_from_granule(
            self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_equal(rdt['tempwat_glblrng_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_spketst_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_stuckvl_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_trndtst_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_gradtst_qc'], [-99] * 5)
Example #12
    def check_localrange(self):
        log.info('check_localrange')
        TestQCFunctions.check_localrange(self)
        self.init_check()

        flagged = Event()

        def cb(event, *args, **kwargs):
            times = event.temporal_values
            if not event.qc_parameter == 'tempwat_loclrng_qc':
                return
            np.testing.assert_array_equal(
                times,
                np.array([
                    3580144708.7555027, 3580144709.7555027, 3580144710.7555027,
                    3580144711.7555027, 3580144712.7555027
                ]))
            flagged.set()

        event_subscriber = EventSubscriber(event_type=OT.ParameterQCEvent,
                                           origin=self.dp_id,
                                           callback=cb,
                                           auto_delete=True)
        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.dataset_monitor.event.wait(10)
        rdt = RecordDictionaryTool.load_from_granule(
            self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_loclrng_qc'],
                                             [1, 1, 1, 1, 1, 0, 0, 0, 0, 0])
        self.assertTrue(flagged.wait(10))
Example #13
    def check_global_range(self):
        TestQCFunctions.check_global_range(self)
        self.init_check()

        flagged = Event()

        def cb(event, *args, **kwargs):
            times = event.temporal_values
            self.assertEquals(times, [0.0, 7.0])
            flagged.set()

        event_subscriber = EventSubscriber(event_type=OT.ParameterQCEvent,
                                           origin=self.dp_id,
                                           callback=cb,
                                           auto_delete=True)
        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.dataset_monitor.event.wait(10)

        rdt = RecordDictionaryTool.load_from_granule(
            self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_glblrng_qc'],
                                             [0, 1, 1, 1, 1, 1, 1, 0])
        self.assertTrue(flagged.wait(10))
Example #14
    def test_ingestion_gap_analysis(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        connection1 = uuid4().hex
        connection2 = uuid4().hex

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [0]
        rdt['temp'] = [0]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection1,connection_index='0'))
        rdt['time'] = [1]
        rdt['temp'] = [1]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection1,connection_index='1'))
        rdt['time'] = [2]
        rdt['temp'] = [2]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection1,connection_index='3')) # Gap, missed message
        rdt['time'] = [3]
        rdt['temp'] = [3]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection2,connection_index='3')) # Gap, new connection
        rdt['time'] = [4]
        rdt['temp'] = [4]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection2,connection_index='4'))
        rdt['time'] = [5]
        rdt['temp'] = [5]
        self.publish_and_wait(dataset_id, rdt.to_granule(connection_id=connection2,connection_index='5'))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(6))
        np.testing.assert_array_equal(rdt['temp'], np.arange(6))
        return dataset_id
Example #15
    def _extract_granule_data(self, granules):
        """
        Pull all data out of all granules and return a list of per-record value dicts
        """
        result = []
        for granule in granules:
            group = []
            log.debug("Granule: %s", granule)
            rdt = RecordDictionaryTool.load_from_granule(granule)

            # Store the data from each record
            for key, value in rdt.iteritems():
                for i in range(0, len(value)):
                    if len(group) <= i:
                        group.append({})
                    group[i][key] = value[i]

                    # Store the connection information for each record
                    if 'connection_index' not in group[i]:
                        group[i]['connection_index'] = granule.connection_index

                    if 'connection_id' not in group[i]:
                        group[i]['connection_id'] = granule.connection_id

            result += group

        log.debug("extracted granules: %s", pprint.pformat(result))

        return result
Example #16
        def verify_points():
                replay_granule = self.data_retriever.retrieve_last_data_points(dataset_id,5)

                rdt = RecordDictionaryTool.load_from_granule(replay_granule)

                comp = rdt['time'] == np.arange(15,20)
                if not isinstance(comp,bool):
                    return comp.all()
                return False
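
This closure is written to be polled rather than asserted once; a hedged usage sketch following the gevent pattern the other examples use:

        with gevent.timeout.Timeout(10):
            while not verify_points():
                gevent.sleep(0.5)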
Example #17
    def check_trend(self):
        log.info('check_trend')
        TestQCFunctions.check_trend(self)
        self.init_check()
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_trndtst_qc'], [1] * 10)
Example #18
        def verifier():
                replay_granule = self.data_retriever.retrieve_last_granule(dataset_id)

                rdt = RecordDictionaryTool.load_from_granule(replay_granule)

                comp = rdt['time'] == np.arange(10) + 10
                if not isinstance(comp,bool):
                    return comp.all()
                return False
Example #19
    def check_gradient(self):
        log.info('check_gradient')
        TestQCFunctions.check_gradient(self)
        self.init_check()
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.dataset_monitor.event.wait(10)

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_equal(rdt['tempwat_gradtst_qc'], [1, 1, 0, 0, 1])
Example #20
    def check_trend(self):
        log.info('check_trend')
        TestQCFunctions.check_trend(self)
        self.init_check()
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_trndtst_qc'], [1] * 10)
Example #21
    def get_visualization_image(self, data_product_id='', visualization_parameters=None, callback=''):

        # Error check
        if not data_product_id:
            raise BadRequest("The data_product_id parameter is missing")
        if visualization_parameters == {}:
            visualization_parameters = None

        # Extract the retrieval-related parameters, initializing every query field first
        query = None
        if visualization_parameters:
            query = {'parameters':[]}
            # Error check and damage control; the time parameter is always required
            if 'parameters' in visualization_parameters:
                if 'time' not in visualization_parameters['parameters']:
                    visualization_parameters['parameters'].append('time')
                query['parameters'] = visualization_parameters['parameters']

            if 'stride_time' in visualization_parameters:
                query['stride_time'] = visualization_parameters['stride_time']
            if 'start_time' in visualization_parameters:
                query['start_time'] = visualization_parameters['start_time']
            if 'end_time' in visualization_parameters:
                query['end_time'] = visualization_parameters['end_time']

        # get the dataset_id associated with the data_product. Need it to do the data retrieval
        ds_ids,_ = self.clients.resource_registry.find_objects(data_product_id, PRED.hasDataset, RT.DataSet, True)
        if not ds_ids:
            return None

        # Ideally just need the latest granule to figure out the list of images
        #replay_granule = self.clients.data_retriever.retrieve(ds_ids[0],{'start_time':0,'end_time':2})
        retrieved_granule = self.clients.data_retriever.retrieve(ds_ids[0], query=query)

        if retrieved_granule is None:
            return None

        # send the granule through the transform to get the matplotlib graphs
        mpl_pdict_id = self.clients.dataset_management.read_parameter_dictionary_by_name('graph_image_param_dict',id_only=True)
        mpl_stream_def = self.clients.pubsub_management.create_stream_definition('mpl', parameter_dictionary_id=mpl_pdict_id)
        mpl_data_granule = VizTransformMatplotlibGraphsAlgorithm.execute(retrieved_granule, config=visualization_parameters, params=mpl_stream_def)

        if mpl_data_granule is None:
            return None

        mpl_rdt = RecordDictionaryTool.load_from_granule(mpl_data_granule)

        ret_dict = dict()
        ret_dict['content_type'] = (get_safe(mpl_rdt, "content_type"))[0]
        ret_dict['image_name'] = (get_safe(mpl_rdt, "image_name"))[0]
        # Encode as a base64 string; otherwise message pack complains about the bit stream
        ret_dict['image_obj'] = base64.encodestring((get_safe(mpl_rdt, "image_obj"))[0])

        if callback == '':
            return ret_dict
        else:
            return callback + "(" + simplejson.dumps(ret_dict) + ")"
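
A hedged usage sketch for the dict return path (vis_service and data_product_id are placeholders; base64.decodestring mirrors the encodestring call above):

    result = vis_service.get_visualization_image(data_product_id, {'parameters': ['time', 'temp']})
    if result is not None:
        with open(result['image_name'], 'wb') as f:
            f.write(base64.decodestring(result['image_obj']))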
Example #22
    def _process_visualization_message(self, messages):

        final_hc_data = []
        final_hc_data_no_numpy = []

        for message in messages:

            if message is None:
                continue

            message_data = message.body

            if isinstance(message_data, Granule):

                rdt = RecordDictionaryTool.load_from_granule(message_data)
                hc_data_arr = get_safe(rdt, 'hc_data')

                # If this granule does not contain highcharts data, skip it
                if hc_data_arr is None:
                    continue

                hc_data = hc_data_arr[0]

                for series in hc_data:
                    if "name" not in series:
                        continue
                    series_name = series["name"]

                    # find index in final data. If it does not exist, create it
                    final_hc_data_idx = -1
                    idx = 0
                    for _s in final_hc_data:
                        if _s["name"] == series_name:
                            final_hc_data_idx = idx
                            break
                        idx += 1

                    # create an entry in the final hc structure if there's none for this series
                    if final_hc_data_idx == -1:
                        final_hc_data.append({})
                        final_hc_data_idx = len(final_hc_data) - 1
                        final_hc_data[final_hc_data_idx]["name"] = series_name
                        final_hc_data[final_hc_data_idx]["data"] = []

                        if "visible" in series:
                            final_hc_data[final_hc_data_idx]["visible"] = series["visible"]
                        if "tooltip" in series:
                            final_hc_data[final_hc_data_idx]["tooltip"] = series["tooltip"]

                    # Append the series data to the final hc structure
                    final_hc_data[final_hc_data_idx]["data"] += series["data"].tolist()

        return json.dumps(final_hc_data)
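
The linear scan for an existing series can be replaced with an index map keyed on series name; a behavior-preserving sketch of the inner loop (names are illustrative):

    index_by_name = {}  # series name -> position in final_hc_data
    for series in hc_data:
        if "name" not in series:
            continue
        name = series["name"]
        if name not in index_by_name:
            entry = {"name": name, "data": []}
            if "visible" in series:
                entry["visible"] = series["visible"]
            if "tooltip" in series:
                entry["tooltip"] = series["tooltip"]
            index_by_name[name] = len(final_hc_data)
            final_hc_data.append(entry)
        final_hc_data[index_by_name[name]]["data"] += series["data"].tolist()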
Example #23
    def test_replay_with_parameters(self):
        #--------------------------------------------------------------------------------
        # Create the configurations and the dataset
        #--------------------------------------------------------------------------------
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary',  param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context('binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context('records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary('replay_pdict', parameter_context_ids=context_ids, temporal_context='time')
        

        stream_def_id = self.pubsub_management.create_stream_definition('replay_stream', parameter_dictionary_id=pdict_id)
        
        stream_id, route  = self.pubsub_management.create_stream('replay_with_params', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        config_id  = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id)

        dataset_modified = Event()
        def cb(*args, **kwargs):
            dataset_modified.set()
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id)
        es.start()

        self.addCleanup(es.stop)

        self.publish_fake_data(stream_id, route)

        self.assertTrue(dataset_modified.wait(30))

        query = {
            'start_time': 0 - 2208988800,
            'end_time':   20 - 2208988800,
            'stride_time' : 2,
            'parameters': ['time','temp']
        }
        retrieved_data = self.data_retriever.retrieve(dataset_id=dataset_id,query=query)

        rdt = RecordDictionaryTool.load_from_granule(retrieved_data)
        comp = np.arange(0,20,2) == rdt['time']
        self.assertTrue(comp.all(),'%s' % rdt.pretty_print())
        self.assertEquals(set(rdt.iterkeys()), set(['time','temp']))

        extents = self.dataset_management.dataset_extents(dataset_id=dataset_id, parameters=['time','temp'])
        self.assertTrue(extents['time']>=20)
        self.assertTrue(extents['temp']>=20)

        self.streams.append(stream_id)
        self.stop_ingestion(stream_id)
Example #24
 def validate_granule_subscription(self, msg, route, stream_id):
     '''
     Validation for granule format
     '''
     if msg == {}:
         return
     rdt = RecordDictionaryTool.load_from_granule(msg)
     log.info('%s', rdt.pretty_print())
     self.assertIsInstance(msg,Granule,'Message is improperly formatted. (%s)' % type(msg))
     self.event.set()
Example #25
    def _process_visualization_message(self, messages):

        final_hc_data = []
        final_hc_data_no_numpy = []

        for message in messages:

            if message is None:
                continue

            message_data = message.body

            if isinstance(message_data,Granule):

                rdt = RecordDictionaryTool.load_from_granule(message_data)
                hc_data_arr = get_safe(rdt, 'hc_data')

                # If this granule does not contain highcharts data, skip it
                if hc_data_arr is None:
                    continue

                hc_data = hc_data_arr[0]

                for series in hc_data:
                    if "name" not in series:
                        continue
                    series_name = series["name"]

                    # find index in final data. If it does not exist, create it
                    final_hc_data_idx = -1
                    idx = 0
                    for _s in final_hc_data:
                        if _s["name"] == series_name:
                            final_hc_data_idx = idx
                            break
                        idx += 1


                    # create an entry in the final hc structure if there's none for this series
                    if final_hc_data_idx == -1:
                        final_hc_data.append({})
                        final_hc_data_idx = len(final_hc_data) - 1
                        final_hc_data[final_hc_data_idx]["name"] = series_name
                        final_hc_data[final_hc_data_idx]["data"] = []

                        if "visible" in series:
                            final_hc_data[final_hc_data_idx]["visible"] = series["visible"]
                        if "tooltip" in series:
                            final_hc_data[final_hc_data_idx]["tooltip"] = series["tooltip"]

                    # Append the series data to the final hc structure
                    final_hc_data[final_hc_data_idx]["data"] += series["data"].tolist()

        return json.dumps(final_hc_data)
Example #26
    def check_gradient(self):
        log.info('check_gradient')
        TestQCFunctions.check_gradient(self)
        self.init_check()
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.dataset_monitor.event.wait(10)

        rdt = RecordDictionaryTool.load_from_granule(
            self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_equal(rdt['tempwat_gradtst_qc'],
                                      [1, 1, 0, 0, 1])
Example #27
 def assert_granule_time(self, granule, target_time):
     """
     Assert that the granule's time matches the target time
     @param granule The granule to be searched
     @param target_time The time that should match the granule's overall time
     """
     rdt = RecordDictionaryTool.load_from_granule(granule)
     rdt_time = rdt['time'][0]
     log.debug("assert_granule_time granule time: %s", rdt_time)
     log.debug("assert_granule_time target timestamp: %s", target_time)
     self.assertEqual(rdt_time, target_time)
Example #28
 def assert_granule_time(self, granule, target_time):
     """
     Assert that the granule's time matches the target time
     @param granule The granule to be searched
     @param target_time The time that should match the granule's overall time
     """
     rdt = RecordDictionaryTool.load_from_granule(granule)
     rdt_time = rdt['time'][0]
     log.debug("assert_granule_time granule time: %s", rdt_time)
     log.debug("assert_granule_time target timestamp: %s", target_time)
     self.assertEqual(rdt_time, target_time)
Example #29
    def assert_raw_granules_ingested(self, count, payload_size):
        #--------------------------------------------------------------------------------
        # Test the slicing capabilities
        #--------------------------------------------------------------------------------
        data_retriever = DataRetrieverServiceClient()

        for i in range(0, count-1):
            granule = data_retriever.retrieve(dataset_id=self._raw_dataset_id, query={'tdoa':slice(i,i+1)})
            rdt = RecordDictionaryTool.load_from_granule(granule)

            log.info("Granule index: %d, time: %s, size: %s", i, rdt['time'][0], len(rdt['raw'][0]))
            self.assertEqual(payload_size, len(rdt['raw'][0]))
Example #30
    def test_coverage_transform(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_parsed()
        stream_def_id = self.pubsub_management.create_stream_definition(
            'ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        stream_def_id)

        stream_id, route = self.pubsub_management.create_stream(
            'example',
            exchange_point=self.exchange_point_name,
            stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)

        ingestion_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)

        self.ingestion_management.persist_data_stream(
            stream_id=stream_id,
            ingestion_configuration_id=ingestion_config_id,
            dataset_id=dataset_id)
        self.addCleanup(self.ingestion_management.unpersist_data_stream,
                        stream_id, ingestion_config_id)
        publisher = StandaloneStreamPublisher(stream_id, route)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_parsed_rdt(rdt)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.event.wait(30))

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], rdt['time'])
        np.testing.assert_array_almost_equal(rdt_out['temp'], rdt['temp'])

        np.testing.assert_array_almost_equal(rdt_out['conductivity_L1'],
                                             np.array([42.914]))
        np.testing.assert_array_almost_equal(rdt_out['temp_L1'],
                                             np.array([20.]))
        np.testing.assert_array_almost_equal(rdt_out['pressure_L1'],
                                             np.array([3.068]))
        np.testing.assert_array_almost_equal(
            rdt_out['density'], np.array([1021.7144739593881],
                                         dtype='float32'))
        np.testing.assert_array_almost_equal(
            rdt_out['salinity'], np.array([30.935132729668283],
                                          dtype='float32'))
Example #31
    def test_out_of_band_retrieve(self):
        # Set up the environment
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        
        # Fill the dataset
        self.publish_fake_data(stream_id, route)
        self.wait_until_we_have_enough_granules(dataset_id,40)

        # Retrieve the data
        granule = DataRetrieverService.retrieve_oob(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assertTrue((rdt['time'] == np.arange(40)).all())
Example #32
 def wait_until_we_have_enough_granules(self, dataset_id='',data_size=40):
     '''
     Loops until there is a sufficient amount of data in the dataset
     '''
     done = False
     with gevent.Timeout(40):
         while not done:
             extents = self.dataset_management.dataset_extents(dataset_id, 'time')
             granule = self.data_retriever.retrieve_last_data_points(dataset_id, 1)
             rdt     = RecordDictionaryTool.load_from_granule(granule)
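             # Done once the newest point holds real data (not the time parameter's
             # fill value) and the reported extents reach the requested size.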
             if rdt['time'] and rdt['time'][0] != rdt._pdict.get_context('time').fill_value and extents >= data_size:
                 done = True
             else:
                 gevent.sleep(0.2)
Example #33
    def assert_raw_granules_ingested(self, count, payload_size):
        #--------------------------------------------------------------------------------
        # Test the slicing capabilities
        #--------------------------------------------------------------------------------
        data_retriever = DataRetrieverServiceClient()

        for i in range(0, count - 1):
            granule = data_retriever.retrieve(dataset_id=self._raw_dataset_id,
                                              query={'tdoa': slice(i, i + 1)})
            rdt = RecordDictionaryTool.load_from_granule(granule)

            log.info("Granule index: %d, time: %s, size: %s", i,
                     rdt['time'][0], len(rdt['raw'][0]))
            self.assertEqual(payload_size, len(rdt['raw'][0]))
Example #34
    def test_replay_with_parameters(self):
        #--------------------------------------------------------------------------------
        # Create the configurations and the dataset
        #--------------------------------------------------------------------------------
        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary',  param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context('binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context('records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary('replay_pdict', parameter_context_ids=context_ids, temporal_context='time')
        

        stream_def_id = self.pubsub_management.create_stream_definition('replay_stream', parameter_dictionary_id=pdict_id)
        
        stream_id, route  = self.pubsub_management.create_stream('replay_with_params', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        config_id  = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        self.publish_fake_data(stream_id, route)

        self.assertTrue(dataset_monitor.wait())

        query = {
            'start_time': 0 - 2208988800,
            'end_time':   19 - 2208988800,
            'stride_time' : 2,
            'parameters': ['time','temp']
        }
        retrieved_data = self.data_retriever.retrieve(dataset_id=dataset_id,query=query)

        rdt = RecordDictionaryTool.load_from_granule(retrieved_data)
        np.testing.assert_array_equal(rdt['time'], np.arange(0,20,2))
        self.assertEquals(set(rdt.iterkeys()), set(['time','temp']))

        extents = self.dataset_management.dataset_extents(dataset_id=dataset_id, parameters=['time','temp'])
        self.assertTrue(extents['time']>=20)
        self.assertTrue(extents['temp']>=20)
Example #35
    def test_ingestion_gap_analysis(self):
        stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(stream_id, dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        connection1 = uuid4().hex
        connection2 = uuid4().hex

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [0]
        rdt['temp'] = [0]
        self.publish_and_wait(
            dataset_id,
            rdt.to_granule(connection_id=connection1, connection_index='0'))
        rdt['time'] = [1]
        rdt['temp'] = [1]
        self.publish_and_wait(
            dataset_id,
            rdt.to_granule(connection_id=connection1, connection_index='1'))
        rdt['time'] = [2]
        rdt['temp'] = [2]
        self.publish_and_wait(dataset_id,
                              rdt.to_granule(
                                  connection_id=connection1,
                                  connection_index='3'))  # Gap, missed message
        rdt['time'] = [3]
        rdt['temp'] = [3]
        self.publish_and_wait(dataset_id,
                              rdt.to_granule(
                                  connection_id=connection2,
                                  connection_index='3'))  # Gap, new connection
        rdt['time'] = [4]
        rdt['temp'] = [4]
        self.publish_and_wait(
            dataset_id,
            rdt.to_granule(connection_id=connection2, connection_index='4'))
        rdt['time'] = [5]
        rdt['temp'] = [5]
        self.publish_and_wait(
            dataset_id,
            rdt.to_granule(connection_id=connection2, connection_index='5'))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(6))
        np.testing.assert_array_equal(rdt['temp'], np.arange(6))
        return dataset_id
Example #36
    def test_retrieve_and_transform(self):

        # Stream definition for the CTD data
        pdict_id             = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id        = self.pubsub_management.create_stream_definition('ctd data', parameter_dictionary_id=pdict_id)
        ctd_stream_id, route = self.pubsub_management.create_stream('ctd stream', 'xp1', stream_definition_id=stream_def_id)


        # Stream definition for the salinity data
        salinity_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        sal_stream_def_id = self.pubsub_management.create_stream_definition('sal data', parameter_dictionary_id=salinity_pdict_id)

        ingest_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        #--------------------------------------------------------------------------------
        # Again with this ridiculous problem
        #--------------------------------------------------------------------------------
        self.get_datastore(dataset_id)
        self.ingestion_management.persist_data_stream(stream_id=ctd_stream_id, ingestion_configuration_id=ingest_config_id, dataset_id=dataset_id)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10)
        rdt['temp'] = np.random.randn(10) * 10 + 30
        rdt['conductivity'] = np.random.randn(10) * 2 + 10

        publisher = StandaloneStreamPublisher(ctd_stream_id, route)
        publisher.publish(rdt.to_granule())

        rdt['time'] = np.arange(10,20)

        publisher.publish(rdt.to_granule())


        self.wait_until_we_have_enough_granules(dataset_id, 2)

        granule = self.data_retriever.retrieve(dataset_id,
                                               None,
                                               None,
                                               'ion.processes.data.transforms.ctd.ctd_L2_salinity',
                                               'CTDL2SalinityTransformAlgorithm',
                                               kwargs=dict(params=sal_stream_def_id))
        rdt = RecordDictionaryTool.load_from_granule(granule)
        for i in rdt['salinity']:
            self.assertNotEquals(i,0)
Example #37
    def test_global_range_test(self):
        TestQCFunctions.test_global_range_test(self)

        flagged = Event()
        def cb(event, *args, **kwargs):
            times = event.temporal_values
            self.assertEquals(times,[0.0, 7.0])
            flagged.set()

        event_subscriber = EventSubscriber(event_type=OT.ParameterQCEvent,origin=self.dataset_id, callback=cb, auto_delete=True)
        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.dataset_monitor.event.wait(10)

        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_glblrng_qc'], [0, 1, 1, 1, 1, 1, 1, 0])
        self.assertTrue(flagged.wait(10))
Example #38
    def test_retrieve_and_transform(self):
        # Make a simple dataset and start ingestion, pretty standard stuff.
        ctd_stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(ctd_stream_id, dataset_id)

        # Stream definition for the salinity data
        salinity_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            "ctd_parsed_param_dict", id_only=True
        )
        sal_stream_def_id = self.pubsub_management.create_stream_definition(
            "sal data", parameter_dictionary_id=salinity_pdict_id
        )

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt["time"] = np.arange(10)
        rdt["temp"] = np.random.randn(10) * 10 + 30
        rdt["conductivity"] = np.random.randn(10) * 2 + 10
        rdt["pressure"] = np.random.randn(10) * 1 + 12

        publisher = StandaloneStreamPublisher(ctd_stream_id, route)
        publisher.publish(rdt.to_granule())

        rdt["time"] = np.arange(10, 20)

        publisher.publish(rdt.to_granule())

        self.wait_until_we_have_enough_granules(dataset_id, 20)

        granule = self.data_retriever.retrieve(
            dataset_id,
            None,
            None,
            "ion.processes.data.transforms.ctd.ctd_L2_salinity",
            "CTDL2SalinityTransformAlgorithm",
            kwargs=dict(params=sal_stream_def_id),
        )
        rdt = RecordDictionaryTool.load_from_granule(granule)
        for i in rdt["salinity"]:
            self.assertNotEquals(i, 0)
        self.streams.append(ctd_stream_id)
        self.stop_ingestion(ctd_stream_id)
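
The module/class pair handed to retrieve names a transform that is applied to the replayed data before it is returned. Judging from the execute(granule, config=..., params=...) classmethod call visible in Example #21, a custom algorithm could be sketched as follows (the class, the doubling logic, and the field choices are all hypothetical):

    class DoubleTempTransformAlgorithm(object):
        # Hypothetical transform: copies time through and doubles temp.
        @classmethod
        def execute(cls, granule, config=None, params=None, **kwargs):
            rdt_in = RecordDictionaryTool.load_from_granule(granule)
            # params carries a stream definition id, as in kwargs=dict(params=...)
            rdt_out = RecordDictionaryTool(stream_definition_id=params)
            rdt_out['time'] = rdt_in['time']
            rdt_out['temp'] = rdt_in['temp'] * 2
            return rdt_out.to_granule()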
Example #39
    def test_retrieve_and_transform(self):
        # Make a simple dataset and start ingestion, pretty standard stuff.
        ctd_stream_id, route, stream_def_id, dataset_id = self.make_simple_dataset()
        self.start_ingestion(ctd_stream_id, dataset_id)

        # Stream definition for the salinity data
        salinity_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        sal_stream_def_id = self.pubsub_management.create_stream_definition(
            'sal data', parameter_dictionary_id=salinity_pdict_id)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(10)
        rdt['temp'] = np.random.randn(10) * 10 + 30
        rdt['conductivity'] = np.random.randn(10) * 2 + 10
        rdt['pressure'] = np.random.randn(10) * 1 + 12

        publisher = StandaloneStreamPublisher(ctd_stream_id, route)
        publisher.publish(rdt.to_granule())

        rdt['time'] = np.arange(10, 20)

        publisher.publish(rdt.to_granule())

        self.wait_until_we_have_enough_granules(dataset_id, 20)

        granule = self.data_retriever.retrieve(
            dataset_id,
            None,
            None,
            'ion.processes.data.transforms.ctd.ctd_L2_salinity',
            'CTDL2SalinityTransformAlgorithm',
            kwargs=dict(params=sal_stream_def_id))
        rdt = RecordDictionaryTool.load_from_granule(granule)
        for i in rdt['salinity']:
            self.assertNotEquals(i, 0)
        self.streams.append(ctd_stream_id)
        self.stop_ingestion(ctd_stream_id)
Example #40
    def check_localrange(self):
        log.info('check_localrange')
        TestQCFunctions.check_localrange(self)
        self.init_check()

        flagged = Event()
        def cb(event, *args, **kwargs):
            times = event.temporal_values
            if not event.qc_parameter == 'tempwat_loclrng_qc':
                return
            np.testing.assert_array_equal( times, np.array([ 3580144708.7555027, 3580144709.7555027, 3580144710.7555027, 3580144711.7555027, 3580144712.7555027]))
            flagged.set()

        event_subscriber = EventSubscriber(event_type = OT.ParameterQCEvent, origin=self.dp_id, callback=cb, auto_delete=True)
        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)
        self.assertTrue(self.dataset_monitor.wait())
        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_almost_equal(rdt['tempwat_loclrng_qc'], [1 ,1 ,1 ,1 ,1 ,0 ,0 ,0 ,0 ,0])
        self.assertTrue(flagged.wait(10))
Example #41
    def check_fill_values(self):
        log.info('check_fill_values')
        self.new_rdt()
        self.init_check()
        self.rdt['time'] = np.arange(5)
        self.rdt['temp'] = [12] * 5
        self.rdt.fetch_lookup_values()

        np.testing.assert_array_equal(self.rdt['tempwat_glblrng_qc'], [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_spketst_qc'], [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_stuckvl_qc'], [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_trndtst_qc'], [-99] * 5)
        np.testing.assert_array_equal(self.rdt['tempwat_gradtst_qc'], [-99] * 5)
        self.ph.publish_rdt_to_data_product(self.dp_id, self.rdt)

        self.assertTrue(self.dataset_monitor.wait())
        rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(self.dataset_id))
        np.testing.assert_array_equal(rdt['tempwat_glblrng_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_spketst_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_stuckvl_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_trndtst_qc'], [-99] * 5)
        np.testing.assert_array_equal(rdt['tempwat_gradtst_qc'], [-99] * 5)
Example #42
    def get_last_granule(self, data_product_id=''):
        # Provides information for users who previously acquired this data product
        # but whose acquisition has since been terminated
        ret = IonObject(OT.ComputedDictValue)
        ret.value = {}
        try:
            dataset_ids, _ = self.clients.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasDataset, id_only=True)
            if not dataset_ids:
                ret.status = ComputedValueAvailability.NOTAVAILABLE
                ret.reason = "No dataset associated with this data product"
            else:
                replay_granule = self.clients.data_retriever.retrieve_last_data_points(dataset_ids[0], number_of_points=1)
                #replay_granule = self.clients.data_retriever.retrieve_last_granule(dataset_ids[0])
                rdt = RecordDictionaryTool.load_from_granule(replay_granule)
                ret.value = {k: rdt[k].tolist() for k in rdt.iterkeys()}
                ret.status = ComputedValueAvailability.PROVIDED
        except NotFound:
            ret.status = ComputedValueAvailability.NOTAVAILABLE
            ret.reason = "FIXME: this message should say why the calculation couldn't be done"
        except Exception as e:
            raise e

        return ret
Example #43
 def test_repersist_data(self):
     stream_id = self.pubsub_management.create_stream()
     config_id = self.get_ingestion_config()
     dataset_id = self.create_dataset()
     self.ingestion_management.persist_data_stream(
         stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id
     )
     self.get_datastore(dataset_id)
     self.publish_hifi(stream_id, 0)
     self.publish_hifi(stream_id, 1)
     self.wait_until_we_have_enough_granules(dataset_id, 2)
     self.ingestion_management.unpersist_data_stream(stream_id=stream_id, ingestion_configuration_id=config_id)
     self.ingestion_management.persist_data_stream(
         stream_id=stream_id, ingestion_configuration_id=config_id, dataset_id=dataset_id
     )
     self.publish_hifi(stream_id, 2)
     self.publish_hifi(stream_id, 3)
     self.wait_until_we_have_enough_granules(dataset_id, 4)
     retrieved_granule = self.data_retriever.retrieve(dataset_id)
     rdt = RecordDictionaryTool.load_from_granule(retrieved_granule)
     comp = rdt["time"] == np.arange(0, 40)
     self.assertTrue(comp.all(), "Uh-oh: %s" % rdt["time"])
Example #44
    def test_coverage_transform(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_parsed()
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)

        ingestion_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)

        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingestion_config_id, dataset_id=dataset_id)
        self.addCleanup(self.ingestion_management.unpersist_data_stream, stream_id, ingestion_config_id)
        publisher = StandaloneStreamPublisher(stream_id, route)
        
        rdt = ph.get_rdt(stream_def_id)
        ph.fill_parsed_rdt(rdt)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.wait())

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], rdt['time'])
        np.testing.assert_array_almost_equal(rdt_out['temp'], rdt['temp'])

        np.testing.assert_allclose(rdt_out['conductivity_L1'], np.array([42.914]))
        np.testing.assert_allclose(rdt_out['temp_L1'], np.array([20.]))
        np.testing.assert_allclose(rdt_out['pressure_L1'], np.array([3.068]))
        np.testing.assert_allclose(rdt_out['density'], np.array([1021.7144739593881], dtype='float32'))
        np.testing.assert_allclose(rdt_out['salinity'], np.array([30.935132729668283], dtype='float32'))
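
The expected L1 values above are consistent with simple linear scalings of the raw counts that ph.fill_parsed_rdt() publishes. The actual parameter functions live in the preloaded parameter dictionary, so the formulas below are inferred from the asserted numbers, not taken from that code:

import numpy as np

raw_conductivity = np.array([4341400.0])
raw_temp = np.array([300000.0])
raw_pressure = np.array([256.8])

# inferred scalings that reproduce the asserted L1 values
conductivity_L1 = raw_conductivity / 100000.0 - 0.5   # -> 42.914
temp_L1 = raw_temp / 10000.0 - 10.0                   # -> 20.0
pressure_L1 = raw_pressure / 100.0 + 0.5              # -> 3.068

np.testing.assert_allclose(conductivity_L1, [42.914])
np.testing.assert_allclose(temp_L1, [20.0])
np.testing.assert_allclose(pressure_L1, [3.068])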
Пример #45
0
    def test_activate_rsn_vel3d(self):

        log.info(
            "--------------------------------------------------------------------------------------------------------"
        )
        # load_parameter_scenarios
        self.container.spawn_process(
            "Loader",
            "ion.processes.bootstrap.ion_loader",
            "IONLoader",
            config=dict(
                op="load",
                scenario="BETA",
                path="master",
                categories=
                "ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
                clearcols="owner_id,org_ids",
                assets="res/preload/r2_ioc/ooi_assets",
                parseooi="True",
            ))

        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='Vel3DMModel',
                                  description="Vel3DMModel")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('test_activate_rsn_vel3d new InstrumentModel id = %s ',
                  instModel_id)

        raw_config = StreamConfiguration(stream_name='raw',
                                         parameter_dictionary_name='raw')
        vel3d_b_sample = StreamConfiguration(
            stream_name='vel3d_b_sample',
            parameter_dictionary_name='vel3d_b_sample')
        vel3d_b_engineering = StreamConfiguration(
            stream_name='vel3d_b_engineering',
            parameter_dictionary_name='vel3d_b_engineering')

        RSN_VEL3D_01 = {
            'DEV_ADDR': "10.180.80.6",
            'DEV_PORT': 2101,
            'DATA_PORT': 1026,
            'CMD_PORT': 1025,
            'PA_BINARY': "port_agent"
        }

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='Vel3DAgent',
            description="Vel3DAgent",
            driver_uri=
            "http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
            stream_configurations=[
                raw_config, vel3d_b_sample, vel3d_b_engineering
            ])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('test_activate_rsn_vel3d new InstrumentAgent id = %s',
                  instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_activate_rsn_vel3d: Create instrument resource to represent the Vel3D '
        )
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='Vel3DDevice',
                                   description="Vel3DDevice",
                                   serial_number="12345")
        instDevice_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, instDevice_id)
        log.debug("test_activate_rsn_vel3d: new InstrumentDevice id = %s  ",
                  instDevice_id)

        port_agent_config = {
            'device_addr': '10.180.80.6',
            'device_port': 2101,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': 1025,
            'data_port': 1026,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='Vel3DAgentInstance',
                                          description="Vel3DAgentInstance",
                                          port_agent_config=port_agent_config,
                                          alerts=[])

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        parsed_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_sample', id_only=True)
        parsed_sample_stream_def_id = self.pubsubcli.create_stream_definition(
            name='vel3d_b_sample',
            parameter_dictionary_id=parsed_sample_pdict_id)

        parsed_eng_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_engineering', id_only=True)
        parsed_eng_stream_def_id = self.pubsubcli.create_stream_definition(
            name='vel3d_b_engineering',
            parameter_dictionary_id=parsed_eng_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'raw', id_only=True)
        raw_stream_def_id = self.pubsubcli.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_sample',
                           description='vel3d_b_sample')

        sample_data_product_id = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=parsed_sample_stream_def_id)
        log.debug('new dp_id = %s', sample_data_product_id)
        self.dpclient.activate_data_product_persistence(
            data_product_id=sample_data_product_id)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=sample_data_product_id)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(sample_data_product_id,
                                                   PRED.hasStream, None, True)
        log.debug('sample_data_product streams1 = %s', stream_ids)

        # Retrieve the id of the DATASET for the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(sample_data_product_id,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for sample_data_product = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]

        pid = self.create_logger('vel3d_b_sample', stream_ids[0])
        self.loggerpids.append(pid)

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_engineering',
                           description='vel3d_b_engineering')

        eng_data_product_id = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_eng_stream_def_id)
        log.debug('new dp_id = %s', eng_data_product_id)
        self.dpclient.activate_data_product_persistence(
            data_product_id=eng_data_product_id)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=eng_data_product_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test')

        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', data_product_id2)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id2)

        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                   PRED.hasStream, None, True)
        log.debug('test_activate_rsn_vel3d Data product streams2 = %s',
                  str(stream_ids))

        # Retrieve the id of the DATASET for the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('test_activate_rsn_vel3d Data set for data_product_id2 = %s',
                  dataset_ids[0])
        self.raw_dataset = dataset_ids[0]

        def start_instrument_agent():
            self.imsclient.start_instrument_agent_instance(
                instrument_agent_instance_id=instAgentInstance_id)

        gevent.joinall([gevent.spawn(start_instrument_agent)])

        #cleanup
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     instDevice_id, ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            gate.process_id)

        #log.trace('Instrument agent instance obj: = %s' , str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(instDevice_id,
                                              to_name=gate.process_id,
                                              process=FakeProcess())

        def check_state(label, desired_state):
            actual_state = self._ia_client.get_agent_state()
            log.debug("%s instrument agent is in state '%s'", label,
                      actual_state)
            self.assertEqual(desired_state, actual_state)

        log.debug("test_activate_rsn_vel3d: got ia client %s",
                  str(self._ia_client))

        check_state("just-spawned", ResourceAgentState.UNINITIALIZED)

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activate_rsn_vel3d: initialize %s", str(retval))
        check_state("initialized", ResourceAgentState.INACTIVE)

        log.debug("test_activate_rsn_vel3d Sending go_active command ")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activate_rsn_vel3d: return value from go_active %s",
                  str(reply))
        check_state("activated", ResourceAgentState.IDLE)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("current state after sending go_active command %s",
                  str(state))
        #
        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activate_rsn_vel3d: run %s", str(reply))
        check_state("commanded", ResourceAgentState.COMMAND)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("current state after sending run command %s", str(state))

        #        cmd = AgentCommand(command=ProtocolEvent.START_AUTOSAMPLE)
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activate_rsn_vel3d: run %s" , str(reply))
        #        state = self._ia_client.get_agent_state()
        #        self.assertEqual(ResourceAgentState.COMMAND, state)
        #
        #        gevent.sleep(5)
        #
        #        cmd = AgentCommand(command=ProtocolEvent.STOP_AUTOSAMPLE)
        #        reply = self._ia_client.execute_agent(cmd)
        #        log.debug("test_activate_rsn_vel3d: run %s" , str(reply))
        #        state = self._ia_client.get_agent_state()
        #        self.assertEqual(ResourceAgentState.COMMAND, state)
        #
        #        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        #        retval = self._ia_client.execute_agent(cmd)
        #        state = retval.result
        #        log.debug("current state after sending STOP_AUTOSAMPLE command %s" , str(state))

        #
        #        cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
        #        retval = self._ia_client.execute_agent(cmd)
        #        state = self._ia_client.get_agent_state()
        #        self.assertEqual(ResourceAgentState.STOPPED, state)
        #
        #        cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
        #        retval = self._ia_client.execute_agent(cmd)
        #        state = self._ia_client.get_agent_state()
        #        self.assertEqual(ResourceAgentState.COMMAND, state)
        #
        #        cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
        #        retval = self._ia_client.execute_agent(cmd)
        #        state = self._ia_client.get_agent_state()
        #        self.assertEqual(ResourceAgentState.IDLE, state)
        #
        #        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        #        retval = self._ia_client.execute_agent(cmd)
        #        state = self._ia_client.get_agent_state()
        #        self.assertEqual(ResourceAgentState.COMMAND, state)

        log.debug("test_activate_rsn_vel3d: calling reset ")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activate_rsn_vel3d: return from reset %s", str(reply))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to start_retrieve
        #--------------------------------------------------------------------------------

        replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset)
        self.assertIsInstance(replay_data_raw, Granule)
        rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw)
        log.debug("RDT raw: %s", str(rdt_raw.pretty_print()))

        self.assertIn('raw', rdt_raw)
        raw_vals = rdt_raw['raw']

        #--------------------------------------------------------------------------------
        # Deactivate loggers
        #--------------------------------------------------------------------------------

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

        self.dpclient.delete_data_product(sample_data_product_id)
        self.dpclient.delete_data_product(eng_data_product_id)
        self.dpclient.delete_data_product(data_product_id2)
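
Stripped of the resource setup, the agent portion of the test above walks a fixed command/state ladder, read off the check_state() assertions. A toy FSM capturing just that ladder (the post-RESET state is an assumption; the test itself stops checking after RESET):

TRANSITIONS = {
    ('UNINITIALIZED', 'INITIALIZE'): 'INACTIVE',
    ('INACTIVE', 'GO_ACTIVE'): 'IDLE',
    ('IDLE', 'RUN'): 'COMMAND',
    ('COMMAND', 'RESET'): 'UNINITIALIZED',  # assumed: RESET returns to UNINITIALIZED
}

class AgentFSM(object):
    def __init__(self):
        self.state = 'UNINITIALIZED'

    def execute(self, event):
        # look up the (state, event) pair and move to the target state
        self.state = TRANSITIONS[(self.state, event)]
        return self.state

fsm = AgentFSM()
assert fsm.execute('INITIALIZE') == 'INACTIVE'
assert fsm.execute('GO_ACTIVE') == 'IDLE'
assert fsm.execute('RUN') == 'COMMAND'
assert fsm.execute('RESET') == 'UNINITIALIZED'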
Пример #46
0
    def test_sparse_values(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_sparse()
        stream_def_id = self.pubsub_management.create_stream_definition('sparse', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)
        dataset_id = self.create_dataset(pdict_id)
        self.start_ingestion(stream_id,dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        # Publish the initial granule.
        # The first granule carries the sparse values: lat is set to 45 and lon to -71.
        ntp_now = time.time() + 2208988800
        rdt = ph.get_rdt(stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['internal_timestamp'] = [ntp_now]
        rdt['temp'] = [300000]
        rdt['preferred_timestamp'] = ['driver_timestamp']
        rdt['port_timestamp'] = [ntp_now]
        rdt['quality_flag'] = ['']
        rdt['lat'] = [45]
        rdt['conductivity'] = [4341400]
        rdt['driver_timestamp'] = [ntp_now]
        rdt['lon'] = [-71]
        rdt['pressure'] = [256.8]

        publisher = StandaloneStreamPublisher(stream_id, route)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.wait())
        dataset_monitor.reset()

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        # Check the values and make sure they're correct
        np.testing.assert_allclose(rdt_out['time'], rdt['time'])
        np.testing.assert_allclose(rdt_out['temp'], rdt['temp'])
        np.testing.assert_allclose(rdt_out['lat'], np.array([45]))
        np.testing.assert_allclose(rdt_out['lon'], np.array([-71]))

        np.testing.assert_allclose(rdt_out['conductivity_L1'], np.array([42.914]))
        np.testing.assert_allclose(rdt_out['temp_L1'], np.array([20.]))
        np.testing.assert_allclose(rdt_out['pressure_L1'], np.array([3.068]))
        np.testing.assert_allclose(rdt_out['density'], np.array([1021.7144739593881], dtype='float32'))
        np.testing.assert_allclose(rdt_out['salinity'], np.array([30.935132729668283], dtype='float32'))


        # We're going to change the lat/lon
        rdt = ph.get_rdt(stream_def_id)
        rdt['time'] = time.time() + 2208988800
        rdt['lat'] = [46]
        rdt['lon'] = [-73]
        
        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.wait())
        dataset_monitor.reset()


        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_allclose(rdt_out['time'], rdt['time'])
        
        for i in xrange(9):
            ntp_now = time.time() + 2208988800
            rdt['time'] = [ntp_now]
            rdt['internal_timestamp'] = [ntp_now]
            rdt['temp'] = [300000]
            rdt['preferred_timestamp'] = ['driver_timestamp']
            rdt['port_timestamp'] = [ntp_now]
            rdt['quality_flag'] = [None]
            rdt['conductivity'] = [4341400]
            rdt['driver_timestamp'] = [ntp_now]
            rdt['pressure'] = [256.8]

            publisher.publish(rdt.to_granule())
            self.assertTrue(dataset_monitor.wait())
            dataset_monitor.reset()

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_allclose(rdt_out['pressure'], np.array([256.8] * 10))
        np.testing.assert_allclose(rdt_out['lat'], np.array([45] + [46] * 9))
        np.testing.assert_allclose(rdt_out['lon'], np.array([-71] + [-73] * 9))
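
The constant 2208988800 added to time.time() throughout these tests is the offset between the NTP epoch (1900-01-01) and the Unix epoch (1970-01-01), in seconds; it can be derived rather than memorized:

from datetime import datetime

# seconds between the NTP and Unix epochs
NTP_DELTA = (datetime(1970, 1, 1) - datetime(1900, 1, 1)).total_seconds()
assert int(NTP_DELTA) == 2208988800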
Пример #47
0
    def _process_visualization_message(self, messages, callback, reqId):

        gdt_description = []
        gdt_content = []
        viz_product_type = 'google_dt'  # defaults to google_dt unless overridden by the message

        for message in messages:

            if message is None:
                continue

            message_data = message.body

            if isinstance(message_data,Granule):

                rdt = RecordDictionaryTool.load_from_granule(message_data)
                gdt_components = get_safe(rdt, 'google_dt_components')

                # IF this granule does not contain google dt, skip
                if gdt_components is None:
                    continue

                gdt_component = gdt_components[0]
                viz_product_type = gdt_component['viz_product_type']

                # Process Google DataTable messages
                if viz_product_type == 'google_dt':

                    # If the data description is being assembled for the first time,
                    # switch the time format from float to datetime
                    if not gdt_description:
                        temp_gdt_description = gdt_component['data_description']
                        gdt_description = [('time', 'datetime', 'time')]

                        for idx in range(1, len(temp_gdt_description)):
                            # entries may not be tuples, so rebuild each one explicitly
                            temp_arr = temp_gdt_description[idx]
                            if temp_arr is not None and temp_arr[0] != 'time':
                                gdt_description.append((temp_arr[0], temp_arr[1], temp_arr[2]))

                    # append all the content to one big array
                    temp_gdt_content = gdt_component['data_content']
                    for tempTuple in temp_gdt_content:
                        # sometimes there are inexplicable empty tuples in the content; drop them
                        if not tempTuple:
                            continue

                        varTuple = []
                        varTuple.append(datetime.fromtimestamp(tempTuple[0]))

                        for idx in range(1, len(tempTuple)):
                            # cast numpy numeric scalars to plain Python floats
                            if gdt_description[idx][1] == 'number':
                                varTuple.append(float(tempTuple[idx]))
                            else:
                                varTuple.append(tempTuple[idx])

                        gdt_content.append(varTuple)


                #TODO - what to do if this is not a valid visualization message?


        # Now that all the messages have been parsed, any last processing should be done here
        if viz_product_type == "google_dt":
            # Using the description and content, build the google data table
            if (gdt_description):
                gdt = gviz_api.DataTable(gdt_description)
                gdt.LoadData(gdt_content)

                # return the json version of the table
                if callback == '':
                    return gdt.ToJSonResponse(req_id = reqId)
                else:
                    return callback + "(\"" + gdt.ToJSonResponse(req_id = reqId) + "\")"

            # Handle the case where there is no data for constructing a GDT
            else:
                if callback == '':
                    return None
                else:
                    return callback + "(null)"

        return None
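
The gviz_api calls above are the whole of the DataTable assembly. A standalone sketch with made-up sample rows (the callback name is hypothetical; the JSONP wrapping mirrors the string concatenation used above):

from datetime import datetime
import gviz_api

description = [('time', 'datetime', 'time'), ('temp', 'number', 'temp')]
content = [
    [datetime.fromtimestamp(0), 20.0],
    [datetime.fromtimestamp(1), 20.1],
]

gdt = gviz_api.DataTable(description)
gdt.LoadData(content)

json_reply = gdt.ToJSonResponse(req_id=0)               # plain JSON reply
jsonp_reply = 'myCallback' + '("' + json_reply + '")'   # JSONP wrapping as above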
Пример #48
0
    def get_visualization_data(self, data_product_id='', visualization_parameters=None, callback='', tqx=""):
        """Retrieves the data for the specified DP

        @param data_product_id    str
        @param visualization_parameters    str
        @param callback     str
        @retval jsonp_visualization_data str
        @throws NotFound    object with specified id, query does not exist
        """

        # error check
        if not data_product_id:
            raise BadRequest("The data_product_id parameter is missing")

        use_direct_access = False
        if visualization_parameters == {}:
            visualization_parameters = None

        reqId = 0
        # If a reqId was passed in tqx, extract it
        if tqx:
            tqx_param_list = tqx.split(";")
            for param in tqx_param_list:
                key, value = param.split(":")
                if key == 'reqId':
                    reqId = value

        # Extract the parameters; initialize the query first
        query = None
        if visualization_parameters:
            #query = {'parameters':[]}
            query = {}
            # Error check and damage control: the time parameter is always required
            if 'parameters' in visualization_parameters and len(visualization_parameters['parameters']) > 0:
                if not 'time' in visualization_parameters['parameters']:
                    visualization_parameters['parameters'].append('time')

                query['parameters'] = visualization_parameters['parameters']

            # The times passed from the UI are system times; the NTP conversion is
            # currently disabled (see the commented-out ntplib calls)
            if 'start_time' in visualization_parameters:
                #query['start_time'] = int(ntplib.system_to_ntp_time(float(visualization_parameters['start_time'])))
                query['start_time'] = int(visualization_parameters['start_time'])

            if 'end_time' in visualization_parameters:
                #query['end_time'] = int(ntplib.system_to_ntp_time(float(visualization_parameters['end_time'])))
                query['end_time'] = int(visualization_parameters['end_time'])

            # stride time (default to 1)
            if 'stride_time' in visualization_parameters:
                query['stride_time'] = int(visualization_parameters['stride_time'])
            else:
                query['stride_time'] = 1

            # direct access parameter
            if 'use_direct_access' in visualization_parameters:
                use_direct_access = int(visualization_parameters['use_direct_access']) == 1

        # get the dataset_id associated with the data_product; it is needed for the retrieval
        ds_ids, _ = self.clients.resource_registry.find_objects(data_product_id, PRED.hasDataset, RT.Dataset, True)

        if not ds_ids:
            raise NotFound("Could not find dataset associated with data product")

        if use_direct_access:
            retrieved_granule = DataRetrieverService.retrieve_oob(ds_ids[0], query=query)
        else:
            #replay_granule = self.clients.data_retriever.retrieve(ds_ids[0],{'start_time':0,'end_time':2})
            retrieved_granule = self.clients.data_retriever.retrieve(ds_ids[0], query=query)

        if retrieved_granule is None:
            return None

        #temp_rdt = RecordDictionaryTool.load_from_granule(retrieved_granule)

        # send the granule through the transform to get the google datatable
        gdt_pdict_id = self.clients.dataset_management.read_parameter_dictionary_by_name('google_dt',id_only=True)
        gdt_stream_def = self.clients.pubsub_management.create_stream_definition('gdt', parameter_dictionary_id=gdt_pdict_id)

        gdt_data_granule = VizTransformGoogleDTAlgorithm.execute(retrieved_granule, params=gdt_stream_def, config=visualization_parameters)
        if gdt_data_granule is None:
            return None

        gdt_rdt = RecordDictionaryTool.load_from_granule(gdt_data_granule)
        gdt_components = get_safe(gdt_rdt, 'google_dt_components')
        gdt_component = gdt_components[0]
        temp_gdt_description = gdt_component["data_description"]
        temp_gdt_content = gdt_component["data_content"]

        # adjust the 'float' time to datetime in the content
        gdt_description = [('time', 'datetime', 'time')]
        gdt_content = []
        for idx in range(1, len(temp_gdt_description)):
            temp_arr = temp_gdt_description[idx]
            if temp_arr is not None and temp_arr[0] != 'time':
                gdt_description.append((temp_arr[0], temp_arr[1], temp_arr[2]))

        for tempTuple in temp_gdt_content:
            # sometimes there are inexplicable empty tuples in the content; drop them
            if not tempTuple:
                continue

            varTuple = []
            varTuple.append(datetime.fromtimestamp(tempTuple[0]))
            for idx in range(1, len(tempTuple)):
                # cast numpy numeric scalars to plain Python floats
                if gdt_description[idx][1] == 'number':
                    if tempTuple[idx] is None:
                        varTuple.append(0.0)
                    else:
                        # Precision is hardcoded for now; it should be set per parameter
                        varTuple.append(round(float(tempTuple[idx]), 5))
                else:
                    varTuple.append(tempTuple[idx])

            gdt_content.append(varTuple)

        # now generate the Google datatable out of the description and content
        gdt = gviz_api.DataTable(gdt_description)
        gdt.LoadData(gdt_content)

        # return the json version of the table
        if callback == '':
            return gdt.ToJSonResponse(req_id = reqId)
        else:
            return callback + "(\"" + gdt.ToJSonResponse(req_id = reqId) + "\")"
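
The tqx argument parsed above follows the Google Visualization wire format: semicolon-separated key:value pairs. An equivalent helper (the function name is ours):

def parse_tqx(tqx):
    # split "reqId:7;out:json" into {'reqId': '7', 'out': 'json'}
    params = {}
    for pair in tqx.split(';'):
        if ':' in pair:
            key, value = pair.split(':', 1)
            params[key] = value
    return params

assert parse_tqx("reqId:7;out:json")['reqId'] == '7'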
Пример #49
0
    def get_visualization_image(self, data_product_id='', visualization_parameters=None, callback=''):

        # Error check
        if not data_product_id:
            raise BadRequest("The data_product_id parameter is missing")
        if visualization_parameters == {}:
            visualization_parameters = None

        # Extract the retrieval-related parameters; initialize everything first
        query = None
        image_name = None
        if visualization_parameters :
            #query = {'parameters':[]}
            query = {}
            # Error check and damage control: the time parameter is always required
            if 'parameters' in visualization_parameters:
                if not 'time' in visualization_parameters['parameters']:
                    visualization_parameters['parameters'].append('time')
                query['parameters'] = visualization_parameters['parameters']

            if 'stride_time' in visualization_parameters:
                query['stride_time'] = visualization_parameters['stride_time']
            if 'start_time' in visualization_parameters:
                query['start_time'] = visualization_parameters['start_time']
            if 'end_time' in visualization_parameters:
                query['end_time'] = visualization_parameters['end_time']

            # If an image_name was given, the image is already in storage; just fetch the
            # latest granule and ignore the other parameters passed
            if 'image_name' in visualization_parameters:
                image_name = visualization_parameters['image_name']

        # get the dataset_id associated with the data_product; it is needed for the retrieval
        ds_ids, _ = self.clients.resource_registry.find_objects(data_product_id, PRED.hasDataset, RT.Dataset, True)

        if not ds_ids:
            log.warn("Specified data_product does not have an associated dataset")
            return None

        # Ideally just need the latest granule to figure out the list of images
        if image_name:
            retrieved_granule = self.clients.data_retriever.retrieve_last_data_points(ds_ids[0], 10)
        else:
            retrieved_granule = self.clients.data_retriever.retrieve(ds_ids[0], query=query)

        #print " >>>>>>>>>>>> RETRIEVED GRANULE : " , retrieved_granule
        if retrieved_granule is None:
            return None

        # no need to pass data through the transform if it's already a pre-computed image
        if image_name:
            mpl_data_granule = retrieved_granule
        else:
            # send the granule through the transform to get the matplotlib graphs
            mpl_pdict_id = self.clients.dataset_management.read_parameter_dictionary_by_name('graph_image_param_dict',id_only=True)
            mpl_stream_def = self.clients.pubsub_management.create_stream_definition('mpl', parameter_dictionary_id=mpl_pdict_id)
            mpl_data_granule = VizTransformMatplotlibGraphsAlgorithm.execute(retrieved_granule, config=visualization_parameters, params=mpl_stream_def)

        if mpl_data_granule is None:
            return None

        mpl_rdt = RecordDictionaryTool.load_from_granule(mpl_data_granule)

        ret_dict = dict()
        ret_dict['content_type'] = get_safe(mpl_rdt, "content_type")[0]
        ret_dict['image_name'] = get_safe(mpl_rdt, "image_name")[0]
        # encode as a base64 string; otherwise msgpack complains about the raw bit stream
        ret_dict['image_obj'] = base64.encodestring(get_safe(mpl_rdt, "image_obj")[0])

        if callback == '':
            return ret_dict
        else:
            return callback + "(" + simplejson.dumps(ret_dict) + ")"
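
The base64 step above exists because raw image bytes do not survive msgpack/JSON transport intact, so they are shipped as an ASCII string. A minimal round trip using b64encode, the modern spelling of the encodestring call used above:

import base64

image_bytes = b'\x89PNG\r\n\x1a\n...'       # stand-in for the real image buffer
encoded = base64.b64encode(image_bytes)     # ASCII-safe for msgpack/JSON
assert base64.b64decode(encoded) == image_bytes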
Пример #50
0
    def test_lookup_values_ingest_replay(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_lookups()
        stream_def_id = self.pubsub_management.create_stream_definition(
            'lookups', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        stream_def_id)

        stream_id, route = self.pubsub_management.create_stream(
            'example',
            exchange_point=self.exchange_point_name,
            stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)

        ingestion_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        config = DotDict()
        config.process.lookup_docs = ['test1', 'test2']
        self.ingestion_management.persist_data_stream(
            stream_id=stream_id,
            ingestion_configuration_id=ingestion_config_id,
            dataset_id=dataset_id,
            config=config)
        self.addCleanup(self.ingestion_management.unpersist_data_stream,
                        stream_id, ingestion_config_id)

        stored_value_manager = StoredValueManager(self.container)
        stored_value_manager.stored_value_cas('test1', {
            'offset_a': 10.0,
            'offset_b': 13.1
        })

        publisher = StandaloneStreamPublisher(stream_id, route)
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = [20.0] * 20

        granule = rdt.to_granule()

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        publisher.publish(granule)
        self.assertTrue(dataset_monitor.event.wait(30))

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], np.arange(20))
        np.testing.assert_array_almost_equal(rdt_out['temp'],
                                             np.array([20.] * 20))
        np.testing.assert_array_almost_equal(rdt_out['calibrated'],
                                             np.array([30.] * 20))
        np.testing.assert_array_equal(
            rdt_out['offset_b'],
            np.array([rdt_out.fill_value('offset_b')] * 20))

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = np.arange(20, 40)
        rdt['temp'] = [20.0] * 20
        granule = rdt.to_granule()

        dataset_monitor.event.clear()

        stored_value_manager.stored_value_cas('test1', {'offset_a': 20.0})
        stored_value_manager.stored_value_cas('coefficient_document',
                                              {'offset_b': 10.0})
        gevent.sleep(2)

        publisher.publish(granule)
        self.assertTrue(dataset_monitor.event.wait(30))

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], np.arange(40))
        np.testing.assert_array_almost_equal(rdt_out['temp'],
                                             np.array([20.] * 20 + [20.] * 20))
        np.testing.assert_array_equal(rdt_out['offset_b'],
                                      np.array([10.] * 40))
        np.testing.assert_array_almost_equal(rdt_out['calibrated'],
                                             np.array([30.] * 20 + [40.] * 20))
        np.testing.assert_array_almost_equal(rdt_out['calibrated_b'],
                                             np.array([40.] * 20 + [50.] * 20))
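
The expected arrays above are consistent with two lookup-backed parameter functions, calibrated = temp + offset_a and calibrated_b = calibrated + offset_b. The real definitions come from ph.create_lookups(), so these formulas are inferred from the asserted values:

import numpy as np

temp = np.array([20.0] * 20)

# first granule: stored doc 'test1' supplies offset_a = 10.0
calibrated_1 = temp + 10.0              # -> [30.0] * 20

# second granule: 'test1' now has offset_a = 20.0 and the
# 'coefficient_document' doc supplies offset_b = 10.0
calibrated_2 = temp + 20.0              # -> [40.0] * 20
calibrated_b_2 = calibrated_2 + 10.0    # -> [50.0] * 20

np.testing.assert_allclose(calibrated_1, [30.0] * 20)
np.testing.assert_allclose(calibrated_b_2, [50.0] * 20)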
Пример #51
0
    def test_sparse_values(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_sparse()
        stream_def_id = self.pubsub_management.create_stream_definition(
            'sparse', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        stream_def_id)
        stream_id, route = self.pubsub_management.create_stream(
            'example',
            exchange_point=self.exchange_point_name,
            stream_definition_id=stream_def_id)
        self.addCleanup(self.pubsub_management.delete_stream, stream_id)
        dataset_id = self.create_dataset(pdict_id)
        self.start_ingestion(stream_id, dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        ntp_now = time.time() + 2208988800
        rdt = ph.get_rdt(stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['internal_timestamp'] = [ntp_now]
        rdt['temp'] = [300000]
        rdt['preferred_timestamp'] = ['driver_timestamp']
        rdt['port_timestamp'] = [ntp_now]
        rdt['quality_flag'] = [None]
        rdt['lat'] = [45]
        rdt['conductivity'] = [4341400]
        rdt['driver_timestamp'] = [ntp_now]
        rdt['lon'] = [-71]
        rdt['pressure'] = [256.8]

        publisher = StandaloneStreamPublisher(stream_id, route)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.event.wait(30))
        dataset_monitor.event.clear()

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)

        np.testing.assert_array_almost_equal(rdt_out['time'], rdt['time'])
        np.testing.assert_array_almost_equal(rdt_out['temp'], rdt['temp'])
        np.testing.assert_array_almost_equal(rdt_out['lat'], np.array([45]))
        np.testing.assert_array_almost_equal(rdt_out['lon'], np.array([-71]))

        np.testing.assert_array_almost_equal(rdt_out['conductivity_L1'],
                                             np.array([42.914]))
        np.testing.assert_array_almost_equal(rdt_out['temp_L1'],
                                             np.array([20.]))
        np.testing.assert_array_almost_equal(rdt_out['pressure_L1'],
                                             np.array([3.068]))
        np.testing.assert_array_almost_equal(
            rdt_out['density'], np.array([1021.7144739593881],
                                         dtype='float32'))
        np.testing.assert_array_almost_equal(
            rdt_out['salinity'], np.array([30.935132729668283],
                                          dtype='float32'))

        rdt = ph.get_rdt(stream_def_id)
        rdt['lat'] = [46]
        rdt['lon'] = [-73]

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.event.wait(30))
        dataset_monitor.event.clear()

        rdt = ph.get_rdt(stream_def_id)
        rdt['lat'] = [1000]
        rdt['lon'] = [3]

        publisher.publish(rdt.to_granule())

        rdt = ph.get_rdt(stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['internal_timestamp'] = [ntp_now]
        rdt['temp'] = [300000]
        rdt['preferred_timestamp'] = ['driver_timestamp']
        rdt['port_timestamp'] = [ntp_now]
        rdt['quality_flag'] = [None]
        rdt['conductivity'] = [4341400]
        rdt['driver_timestamp'] = [ntp_now]
        rdt['pressure'] = [256.8]

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.event.wait(30))
        dataset_monitor.event.clear()

        replay_granule = self.data_retriever.retrieve(dataset_id)
        rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)
        np.testing.assert_array_almost_equal(rdt_out['lat'], np.array([45,
                                                                       46]))
        np.testing.assert_array_almost_equal(rdt_out['lon'],
                                             np.array([-71, -73]))
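
Sparse parameters behave like a forward fill along the record axis: a value published once (lat=45) applies to every later record until a new value (lat=46) arrives. A numpy sketch of that semantic (the helper is illustrative, not the ingestion code):

import numpy as np

def forward_fill(values):
    # carry the last concrete value into NaN gaps
    out = np.array(values, dtype=float)
    for i in range(1, len(out)):
        if np.isnan(out[i]):
            out[i] = out[i - 1]
    return out

# lat observed only on the first two records; the rest inherit 46
lat = forward_fill([45, 46] + [np.nan] * 8)
np.testing.assert_allclose(lat, [45] + [46] * 9)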
Пример #52
0
    def test_dm_end_2_end(self):
        #--------------------------------------------------------------------------------
        # Set up a stream and have a mock instrument (producer) send data
        #--------------------------------------------------------------------------------
        self.event.clear()

        # Get a precompiled parameter dictionary with basic ctd fields
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',id_only=True)
        context_ids = self.dataset_management.read_parameter_contexts(pdict_id, id_only=True)

        # Add a field that supports binary data input.
        bin_context = ParameterContext('binary',  param_type=ArrayType())
        context_ids.append(self.dataset_management.create_parameter_context('binary', bin_context.dump()))
        # Add another field that supports dictionary elements.
        rec_context = ParameterContext('records', param_type=RecordType())
        context_ids.append(self.dataset_management.create_parameter_context('records', rec_context.dump()))

        pdict_id = self.dataset_management.create_parameter_dictionary('replay_pdict', parameter_context_ids=context_ids, temporal_context='time')
        
        stream_definition = self.pubsub_management.create_stream_definition('ctd data', parameter_dictionary_id=pdict_id)


        stream_id, route = self.pubsub_management.create_stream('producer', exchange_point=self.exchange_point_name, stream_definition_id=stream_definition)

        #--------------------------------------------------------------------------------
        # Start persisting the data on the stream 
        # - Get the ingestion configuration from the resource registry
        # - Create the dataset
        # - call persist_data_stream to setup the subscription for the ingestion workers
        #   on the stream that you specify which causes the data to be persisted
        #--------------------------------------------------------------------------------

        ingest_config_id = self.get_ingestion_config()
        dataset_id = self.create_dataset(pdict_id)
        self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingest_config_id, dataset_id=dataset_id)
        self.addCleanup(self.stop_ingestion, stream_id)

        #--------------------------------------------------------------------------------
        # Now the granules are ingesting and persisted
        #--------------------------------------------------------------------------------

        self.launch_producer(stream_id)
        self.wait_until_we_have_enough_granules(dataset_id,40)
        
        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to start_retrieve
        #--------------------------------------------------------------------------------
        
        replay_data = self.data_retriever.retrieve(dataset_id)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        self.assertTrue((rdt['time'][:10] == np.arange(10)).all(),'%s' % rdt['time'][:])
        self.assertTrue((rdt['binary'][:10] == np.array(['hi']*10, dtype='object')).all())

        
        #--------------------------------------------------------------------------------
        # Now to try the streamed approach
        #--------------------------------------------------------------------------------
        replay_stream_id, replay_route = self.pubsub_management.create_stream('replay_out', exchange_point=self.exchange_point_name, stream_definition_id=stream_definition)
        self.replay_id, process_id =  self.data_retriever.define_replay(dataset_id=dataset_id, stream_id=replay_stream_id)
        log.info('Process ID: %s', process_id)

        replay_client = ReplayClient(process_id)

    
        #--------------------------------------------------------------------------------
        # Create the listening endpoint for the the retriever to talk to 
        #--------------------------------------------------------------------------------
        sub_id = self.pubsub_management.create_subscription(self.exchange_space_name,stream_ids=[replay_stream_id])
        self.addCleanup(self.pubsub_management.delete_subscription, sub_id)
        self.pubsub_management.activate_subscription(sub_id)
        self.addCleanup(self.pubsub_management.deactivate_subscription, sub_id)
        subscriber = StandaloneStreamSubscriber(self.exchange_space_name, self.validate_granule_subscription)
        subscriber.start()
        self.addCleanup(subscriber.stop)

        self.data_retriever.start_replay_agent(self.replay_id)

        self.assertTrue(replay_client.await_agent_ready(5), 'The process never launched')
        replay_client.start_replay()
        
        self.assertTrue(self.event.wait(10))

        self.data_retriever.cancel_replay_agent(self.replay_id)


        #--------------------------------------------------------------------------------
        # Test the slicing capabilities
        #--------------------------------------------------------------------------------

        granule = self.data_retriever.retrieve(dataset_id=dataset_id, query={'tdoa':slice(0,5)})
        rdt = RecordDictionaryTool.load_from_granule(granule)
        b = rdt['time'] == np.arange(5)
        self.assertTrue(b.all() if not isinstance(b,bool) else b)
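
For reference, the retrieve() query forms exercised across these examples, assembled here as plain dicts (all keys are optional):

# by time window (NTP seconds), as in the replay tests
query_by_time = {'start_time': 0, 'end_time': 6}

# by time window plus an explicit parameter subset
query_by_params = {'start_time': 0, 'end_time': 20, 'parameters': ['time', 'temp']}

# by direct index slice into the dataset, as in the tdoa query above
query_by_slice = {'tdoa': slice(0, 5)}

# with a decimation stride, as built by the visualization service
query_with_stride = {'start_time': 0, 'end_time': 20, 'stride_time': 2}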
Пример #53
0
    def get_retrieve_client(self, dataset_id=''):
        replay_data = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        self.assertIsNotNone(rdt['c_wpt_y_lmc'])
Пример #54
0
    def test_activateInstrumentSample(self):

        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        raw_config = StreamConfiguration(stream_name='raw',
                                         parameter_dictionary_name='raw')
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict')

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=DRV_URI_GOOD,
            stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) '
        )
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        instDevice_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, instDevice_id)
        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config,
                                          alerts=[])

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubcli.create_stream_definition(
            name='parsed', parameter_dictionary_id=parsed_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'raw', id_only=True)
        raw_stream_def_id = self.pubsubcli.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug('new dp_id = %s', data_product_id1)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the DATASET for the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]

        pid = self.create_logger('ctd_parsed', stream_ids[0])
        self.loggerpids.append(pid)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', data_product_id2)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id2)

        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2)

        # setup notifications for the device and parsed data product
        user_id_1 = self._create_notification(user_name='user_1',
                                              instrument_id=instDevice_id,
                                              product_id=data_product_id1)
        #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users ---------
        user_id_2 = self._create_notification(user_name='user_2',
                                              instrument_id=instDevice_id,
                                              product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                   PRED.hasStream, None, True)
        log.debug('Data product streams2 = %s', str(stream_ids))

        # Retrieve the id of the DATASET for the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id2 = %s', dataset_ids[0])
        self.raw_dataset = dataset_ids[0]

        #elastic search debug
        es_indexes, _ = self.container.resource_registry.find_resources(
            restype='ElasticSearchIndex')
        log.debug('ElasticSearch indexes: %s', [i.name for i in es_indexes])
        log.debug('Bootstrap %s', CFG.bootstrap.use_es)

        def start_instrument_agent():
            self.imsclient.start_instrument_agent_instance(
                instrument_agent_instance_id=instAgentInstance_id)

        gevent.joinall([gevent.spawn(start_instrument_agent)])

        #cleanup
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     instDevice_id, ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            gate.process_id)

        #log.trace('Instrument agent instance obj: = %s' , str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(instDevice_id,
                                              to_name=gate.process_id,
                                              process=FakeProcess())

        log.debug("test_activateInstrumentSample: got ia client %s",
                  str(self._ia_client))

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: initialize %s", str(retval))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.INACTIVE, state)

        log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrument: return value from go_active %s",
                  str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.IDLE, state)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug(
            "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
            str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: run %s", str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.STOPPED, state)

        cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.IDLE, state)

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(ResourceAgentState.COMMAND, state)

        cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE)
        for i in xrange(10):
            retval = self._ia_client.execute_resource(cmd)
            log.debug("test_activateInstrumentSample: return from sample %s",
                      str(retval))

        log.debug("test_activateInstrumentSample: calling reset ")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset %s",
                  str(reply))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to start_retrieve
        #--------------------------------------------------------------------------------

        replay_data_raw = self.dataretrieverclient.retrieve(self.raw_dataset)
        self.assertIsInstance(replay_data_raw, Granule)
        rdt_raw = RecordDictionaryTool.load_from_granule(replay_data_raw)
        log.debug("RDT raw: %s", str(rdt_raw.pretty_print()))

        self.assertIn('raw', rdt_raw)
        raw_vals = rdt_raw['raw']

        all_raw = "".join(raw_vals)

        # look for 't' entered after a prompt -- ">t"
        t_commands = all_raw.count(">t")
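        # Each of the 10 ACQUIRE_SAMPLE commands issued above should have
        # produced one sample request at the instrument prompt, hence the
        # expected 10 ">t" occurrences in the concatenated raw stream.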

        if 10 != t_commands:
            log.error("%s raw_vals: ", len(raw_vals))
            for i, r in enumerate(raw_vals):
                log.error("raw val %s: %s", i, [r])
            self.fail("Expected 10 't' strings in raw_vals, got %s" %
                      t_commands)
        else:
            log.debug("%s raw_vals: ", len(raw_vals))
            for i, r in enumerate(raw_vals):
                log.debug("raw val %s: %s", i, [r])

        replay_data_parsed = self.dataretrieverclient.retrieve(
            self.parsed_dataset)
        self.assertIsInstance(replay_data_parsed, Granule)
        rdt_parsed = RecordDictionaryTool.load_from_granule(replay_data_parsed)
        log.debug("test_activateInstrumentSample: RDT parsed: %s",
                  str(rdt_parsed.pretty_print()))
        self.assertIn('temp', rdt_parsed)
        temp_vals = rdt_parsed['temp']
        pressure_vals = rdt_parsed['pressure']
        if 10 != len(temp_vals):
            log.error("%s temp_vals: %s", len(temp_vals), temp_vals)
            self.fail("Expected 10 temp_vals, got %s" % len(temp_vals))

        log.debug("l4-ci-sa-rq-138")
        """
        Physical resource control shall be subject to policy

        Instrument management control capabilities shall be subject to policy

        The actor accessing the control capabilities must be authorized to send commands.

        note from maurice 2012-05-18: Talk to tim M to verify that this is policy.  If it is then talk with Stephen to
                                      get an example of a policy test and use that to create a test stub that will be
                                      completed when we have instrument policies.

        Tim M: The "actor", aka observatory operator, will access the instrument through ION.

        """

        #--------------------------------------------------------------------------------
        # Get the extended data product to see if it contains the granules
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(
            data_product_id=data_product_id1, user_id=user_id_1)

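        # Poll (up to 30 s) until the notification request registered for this
        # user shows up in the extended product's computed attributes; a single
        # read can race the notification's propagation.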
        def poller(extended_product):
            return len(extended_product.computed.user_notification_requests.value) == 1

        poll(poller, extended_product, timeout=30)

        self._check_computed_attributes_of_extended_product(
            expected_data_product_id=data_product_id1,
            extended_data_product=extended_product)

        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_1)

        #--------------------------------------------------------------------------------
        # For the second user, check the extended data product and the extended instrument
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(
            data_product_id=data_product_id2, user_id=user_id_2)
        self._check_computed_attributes_of_extended_product(
            expected_data_product_id=data_product_id2,
            extended_data_product=extended_product)

        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_2)
        self._check_computed_attributes_of_extended_instrument(
            expected_instrument_device_id=instDevice_id,
            extended_instrument=extended_instrument)

        #--------------------------------------------------------------------------------
        # Deactivate loggers
        #--------------------------------------------------------------------------------

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

        self.dpclient.delete_data_product(data_product_id1)
        self.dpclient.delete_data_product(data_product_id2)
Example #55
0
    def test_activateInstrumentSample(self):

        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        #Create stream alarms
        """
        test_two_sided_interval
        Test interval alarm and alarm event publishing for a closed
        interval.
        """

        #        kwargs = {
        #            'name' : 'test_sim_warning',
        #            'stream_name' : 'parsed',
        #            'value_id' : 'temp',
        #            'message' : 'Temperature is above test range of 5.0.',
        #            'type' : StreamAlarmType.WARNING,
        #            'upper_bound' : 5.0,
        #            'upper_rel_op' : '<'
        #        }
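
        # The docstring above describes a closed (two-sided) interval, but the
        # active alarm below only sets a lower bound. A two-sided variant,
        # assuming IntervalAlarmDef accepts both bounds at once, might look
        # like:
        #
        #   kwargs = {
        #       'name': 'temperature_range_warning',
        #       'stream_name': 'parsed',
        #       'value_id': 'temp',
        #       'message': 'Temperature is outside the range [5.0, 50.0].',
        #       'type': StreamAlarmType.WARNING,
        #       'lower_bound': 5.0,
        #       'lower_rel_op': '<',
        #       'upper_bound': 50.0,
        #       'upper_rel_op': '<',
        #   }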

        kwargs = {
            'name': 'temperature_warning_interval',
            'stream_name': 'parsed',
            'value_id': 'temp',
            'message':
            'Temperature is below the normal range of 50.0 and above.',
            'type': StreamAlarmType.WARNING,
            'lower_bound': 50.0,
            'lower_rel_op': '<'
        }

        # Create alarm object.
        alarm = {}
        alarm['type'] = 'IntervalAlarmDef'
        alarm['kwargs'] = kwargs

        raw_config = StreamConfiguration(
            stream_name='raw',
            parameter_dictionary_name='ctd_raw_param_dict',
            records_per_granule=2,
            granule_publish_rate=5)
        parsed_config = StreamConfiguration(
            stream_name='parsed',
            parameter_dictionary_name='ctd_parsed_param_dict',
            records_per_granule=2,
            granule_publish_rate=5,
            alarms=[alarm])
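
        # Both streams publish granules of 2 records each; granule_publish_rate
        # (presumably in seconds) throttles publication, and only the parsed
        # stream carries the interval alarm defined above.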

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=
            "http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1a-py2.7.egg",
            stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug(
            'test_activateInstrumentSample: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) '
        )
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        instDevice_id = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, instDevice_id)

        log.debug(
            "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ",
            instDevice_id)

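        # Port agent configuration: a separate UNIX process that bridges the
        # driver to the (simulated) SBE37 at the host and ports taken from
        # CFG.device.sbe37.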
        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
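        # time_series_domain() supplies the temporal and spatial coordinate
        # domains above; dump() serializes them for embedding in the
        # DataProduct objects created below.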

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.pubsubcli.create_stream_definition(
            name='parsed', parameter_dictionary_id=parsed_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.pubsubcli.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug('new dp_id = %s', data_product_id1)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]

        pid = self.create_logger('ctd_parsed', stream_ids[0])
        self.loggerpids.append(pid)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', data_product_id2)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id,
                                            data_product_id=data_product_id2)

        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2)

        # setup notifications for the device and parsed data product
        user_id_1 = self._create_notification(user_name='user_1',
                                              instrument_id=instDevice_id,
                                              product_id=data_product_id1)
        #---------- Create notifications for another user and verify that we see different computed subscriptions for the two users ---------
        user_id_2 = self._create_notification(user_name='user_2',
                                              instrument_id=instDevice_id,
                                              product_id=data_product_id2)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                   PRED.hasStream, None, True)
        log.debug('Data product streams2 = %s', str(stream_ids))

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(data_product_id2,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id2 = %s', dataset_ids[0])
        self.raw_dataset = dataset_ids[0]

        #elastic search debug
        es_indexes, _ = self.container.resource_registry.find_resources(
            restype='ElasticSearchIndex')
        log.debug('ElasticSearch indexes: %s', [i.name for i in es_indexes])
        log.debug('Bootstrap %s', CFG.bootstrap.use_es)

        def start_instrument_agent():
            self.imsclient.start_instrument_agent_instance(
                instrument_agent_instance_id=instAgentInstance_id)

        gevent.joinall([gevent.spawn(start_instrument_agent)])

        #setup a subscriber to alarm events from the device
        self._events_received = []
        self._event_count = 0
        self._samples_out_of_range = 0
        self._samples_complete = False
        self._async_sample_result = AsyncResult()

        def consume_event(*args, **kwargs):
            log.debug(
                'TestActivateInstrument received ION event: args=%s, kwargs=%s, event=%s.',
                str(args), str(kwargs), str(args[0]))
            self._events_received.append(args[0])
            self._event_count = len(self._events_received)
            self._async_sample_result.set()

        self._event_subscriber = EventSubscriber(
            event_type='StreamWarningAlarmEvent',  # or the base StreamAlarmEvent
            callback=consume_event,
            origin=instDevice_id)
        self._event_subscriber.start()

        #cleanup
        self.addCleanup(self.imsclient.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        def stop_subscriber():
            self._event_subscriber.stop()
            self._event_subscriber = None

        self.addCleanup(stop_subscriber)

        #wait for start
        inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance(
            instAgentInstance_id)
        gate = ProcessStateGate(self.processdispatchclient.read_process,
                                inst_agent_instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(
            gate.await(30),
            "The instrument agent instance (%s) did not spawn in 30 seconds" %
            inst_agent_instance_obj.agent_process_id)

        log.debug('Instrument agent instance obj: = %s',
                  str(inst_agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient(
            instDevice_id,
            to_name=inst_agent_instance_obj.agent_process_id,
            process=FakeProcess())

        log.debug("test_activateInstrumentSample: got ia client %s",
                  str(self._ia_client))

        cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: initialize %s", str(retval))
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.INACTIVE)

        log.debug("(L4-CI-SA-RQ-334): Sending go_active command ")
        cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrument: return value from go_active %s",
                  str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.IDLE)

        cmd = AgentCommand(command=ResourceAgentEvent.GET_RESOURCE_STATE)
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug(
            "(L4-CI-SA-RQ-334): current state after sending go_active command %s",
            str(state))

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: run %s", str(reply))
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        cmd = AgentCommand(command=ResourceAgentEvent.PAUSE)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.STOPPED)

        cmd = AgentCommand(command=ResourceAgentEvent.RESUME)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        cmd = AgentCommand(command=ResourceAgentEvent.CLEAR)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.IDLE)

        cmd = AgentCommand(command=ResourceAgentEvent.RUN)
        retval = self._ia_client.execute_agent(cmd)
        state = self._ia_client.get_agent_state()
        self.assertEqual(state, ResourceAgentState.COMMAND)

        cmd = AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE)
        for i in xrange(10):
            retval = self._ia_client.execute_resource(cmd)
            log.debug("test_activateInstrumentSample: return from sample %s",
                      str(retval))

        log.debug("test_activateInstrumentSample: calling reset ")
        cmd = AgentCommand(command=ResourceAgentEvent.RESET)
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentSample: return from reset %s",
                  str(reply))

        self._samples_complete = True

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to start_retrieve
        #--------------------------------------------------------------------------------

        replay_data = self.dataretrieverclient.retrieve(self.parsed_dataset)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        log.debug("test_activateInstrumentSample: RDT parsed: %s",
                  str(rdt.pretty_print()))
        temp_vals = rdt['temp']
        self.assertEquals(len(temp_vals), 10)
        log.debug("test_activateInstrumentSample: all temp_vals: %s",
                  temp_vals)

        #out_of_range_temp_vals = [i for i in temp_vals if i > 5]
        out_of_range_temp_vals = [i for i in temp_vals if i < 50.0]
        log.debug("test_activateInstrumentSample: Out_of_range_temp_vals: %s",
                  out_of_range_temp_vals)
        self._samples_out_of_range = len(out_of_range_temp_vals)
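        # Any sample below the alarm's 50.0 lower bound should have produced
        # at least one StreamWarningAlarmEvent from the device.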

        # if no bad values were produced, then do not wait for an event
        if self._samples_out_of_range == 0:
            self._async_sample_result.set()

        log.debug("test_activateInstrumentSample: _events_received: %s",
                  self._events_received)
        log.debug("test_activateInstrumentSample: _event_count: %s",
                  self._event_count)

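        # Block until the subscriber has delivered a warning event (or until
        # the result was pre-set above because no samples were out of range);
        # get() raises a timeout error otherwise.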
        self._async_sample_result.get(timeout=CFG.endpoint.receive.timeout)

        replay_data = self.dataretrieverclient.retrieve(self.raw_dataset)
        self.assertIsInstance(replay_data, Granule)
        rdt = RecordDictionaryTool.load_from_granule(replay_data)
        log.debug("RDT raw: %s", str(rdt.pretty_print()))

        raw_vals = rdt['raw']
        self.assertEquals(len(raw_vals), 10)

        log.debug("l4-ci-sa-rq-138")
        """
        Physical resource control shall be subject to policy

        Instrument management control capabilities shall be subject to policy

        The actor accessing the control capabilities must be authorized to send commands.

        note from maurice 2012-05-18: Talk to tim M to verify that this is policy.  If it is then talk with Stephen to
                                      get an example of a policy test and use that to create a test stub that will be
                                      completed when we have instrument policies.

        Tim M: The "actor", aka observatory operator, will access the instrument through ION.

        """

        #--------------------------------------------------------------------------------
        # Get the extended data product to see if it contains the granules
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(
            data_product_id=data_product_id1, user_id=user_id_1)

        def poller(extended_product):
            return len(extended_product.computed.user_notification_requests.value) == 1

        poll(poller, extended_product, timeout=30)

        self._check_computed_attributes_of_extended_product(
            expected_data_product_id=data_product_id1,
            extended_data_product=extended_product)

        #--------------------------------------------------------------------------------
        #put some events into the eventsdb to test - this should set the comms and data status to WARNING
        #--------------------------------------------------------------------------------

        t = get_ion_ts()
        self.event_publisher.publish_event(ts_created=t,
                                           event_type='DeviceStatusEvent',
                                           origin=instDevice_id,
                                           state=DeviceStatusType.OUT_OF_RANGE,
                                           values=[200])
        self.event_publisher.publish_event(
            ts_created=t,
            event_type='DeviceCommsEvent',
            origin=instDevice_id,
            state=DeviceCommsType.DATA_DELIVERY_INTERRUPTION,
            lapse_interval_seconds=20)

        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_1)
        self._check_computed_attributes_of_extended_instrument(
            expected_instrument_device_id=instDevice_id,
            extended_instrument=extended_instrument)

        #--------------------------------------------------------------------------------
        # For the second user, check the extended data product and the extended instrument
        #--------------------------------------------------------------------------------
        extended_product = self.dpclient.get_data_product_extension(
            data_product_id=data_product_id2, user_id=user_id_2)
        self._check_computed_attributes_of_extended_product(
            expected_data_product_id=data_product_id2,
            extended_data_product=extended_product)

        #---------- Put some events into the eventsdb to test - this should set the comms and data status to WARNING  ---------

        t = get_ion_ts()
        self.event_publisher.publish_event(ts_created=t,
                                           event_type='DeviceStatusEvent',
                                           origin=instDevice_id,
                                           state=DeviceStatusType.OUT_OF_RANGE,
                                           values=[200])
        self.event_publisher.publish_event(
            ts_created=t,
            event_type='DeviceCommsEvent',
            origin=instDevice_id,
            state=DeviceCommsType.DATA_DELIVERY_INTERRUPTION,
            lapse_interval_seconds=20)

        #--------------------------------------------------------------------------------
        # Get the extended instrument
        #--------------------------------------------------------------------------------

        extended_instrument = self.imsclient.get_instrument_device_extension(
            instrument_device_id=instDevice_id, user_id=user_id_2)
        self._check_computed_attributes_of_extended_instrument(
            expected_instrument_device_id=instDevice_id,
            extended_instrument=extended_instrument)

        #--------------------------------------------------------------------------------
        # Deactivate loggers
        #--------------------------------------------------------------------------------

        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

        self.dpclient.delete_data_product(data_product_id1)
        self.dpclient.delete_data_product(data_product_id2)