def lookup_associations(self, classname):
    from pyon.util.config import Config
    from pyon.util.containers import DotDict
    predicates = DotDict()
    predicates.update(
        Config(["res/config/associations.yml"]).data['PredicateTypes'])
    output = {}
    for key in predicates:
        # Note: str() makes these substring checks against the list repr
        domain = str(predicates[key]["domain"])
        range_ = str(predicates[key]["range"])   # avoid shadowing builtin range
        if classname in domain or classname in range_:
            output[key] = predicates[key]
    return output
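For orientation before the remaining examples: pyon's DotDict (from pyon.util.containers) is, roughly, a dict whose keys are also reachable as attributes, with nested DotDicts created on the fly so dotted assignment works. A minimal sketch of that behavior (illustrative only, not pyon's implementation):

class MiniDotDict(dict):
    """Toy model of DotDict-style attribute access with auto-vivification."""

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            # Auto-vivify a nested mapping so `d.a.b = 1` works
            value = MiniDotDict()
            self[key] = value
            return value

    def __setattr__(self, key, value):
        self[key] = value

d = MiniDotDict()
d.process.queue_name = "viz_queue"   # creates d["process"] on the fly
assert d["process"]["queue_name"] == "viz_queue"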
Example #2
    def test_dict_modifier(self):
        base = DotDict({"foo": "bar", "bah": "fah"})
        dict_modifier = DictModifier(base)
        self.assertEqual(dict_modifier["foo"], "bar")

        top = DotDict({"bah": "lah", "doh": "ray"})
        dict_modifier.update(top)
        saved_dict_modifier = dict_modifier
        self.assertEqual(dict_modifier["foo"], "bar")
        self.assertEqual(dict_modifier["bah"], "lah")
        self.assertEqual(dict_modifier["doh"], "ray")

        dict_modifier = DictModifier(dict_modifier)
        self.assertEqual(dict_modifier["foo"], "bar")
        self.assertEqual(dict_modifier["bah"], "lah")
        self.assertEqual(dict_modifier["doh"], "ray")
        self.assertEqual(dict_modifier.base, saved_dict_modifier)

        top = DotDict({"bah": "trah"})
        dict_modifier.update(top)
        saved_dict_modifier = dict_modifier
        self.assertEqual(dict_modifier["foo"], "bar")
        self.assertEqual(dict_modifier["bah"], "trah")
        self.assertEqual(dict_modifier["doh"], "ray")
Example #3
    @classmethod
    def setUpClass(cls):

        # This test does not start a container so we have to hack creating a FileSystem singleton instance
        FileSystem(DotDict())

        @unittest.skipIf(no_numpy_h5py, 'numpy and/or h5py not imported')
        def create_known(dataset_name, rootgrp_name, grp_name):
            """
            A known array to compare against during tests
            """

            known_array = numpy.random.rand(10, 20)

            filename = FileSystem.get_url(FS.TEMP, random_name(), ".hdf5")

            # Write an hdf file with known values to compare against
            h5pyfile = h5py.File(filename, mode='w', driver='core')
            grp = h5pyfile.create_group(rootgrp_name)
            subgrp = grp.create_group(grp_name)
            dataset = subgrp.create_dataset(dataset_name,
                                            known_array.shape,
                                            known_array.dtype.str,
                                            compression='gzip',
                                            compression_opts=4,
                                            maxshape=(None, None))

            dataset.write_direct(known_array)
            h5pyfile.close()

            # Convert the hdf file into a binary string
            f = open(filename, mode='rb')
            # This is a known string to compare against during tests
            known_hdf_as_string = f.read()
            f.close()
            # Clean up the temporary file
            FileSystem.unlink(f.name)

            return known_array, known_hdf_as_string

        # Patch the known data onto the class as class attributes.
        TestScienceObjectCodec.known_array, TestScienceObjectCodec.known_hdf_as_string = create_known(
            TestScienceObjectCodec.dataset_name,
            TestScienceObjectCodec.rootgrp_name,
            TestScienceObjectCodec.grp_name)

        TestScienceObjectCodec.known_hdf_as_sha1 = sha1(
            TestScienceObjectCodec.known_hdf_as_string)
Example #4
    def _launch_highcharts(self, viz_id, data_product_id, out_stream_id):
        '''
        Launches the high-charts transform
        '''
        stream_ids, _ = self.clients.resource_registry.find_objects(
            data_product_id, PRED.hasStream, id_only=True)
        if not stream_ids:
            raise BadRequest(
                "Can't launch high charts streaming: data product doesn't have associated stream (%s)"
                % data_product_id)

        queue_name = 'viz_%s' % data_product_id
        sub_id = self.clients.pubsub_management.create_subscription(
            name='viz transform for %s' % data_product_id,
            exchange_name=queue_name,
            stream_ids=stream_ids)

        self.clients.pubsub_management.activate_subscription(sub_id)

        self.clients.resource_registry.create_association(
            viz_id, PRED.hasSubscription, sub_id)

        config = DotDict()
        config.process.publish_streams.highcharts = out_stream_id
        config.process.queue_name = queue_name

        # This process MUST launch on the first attempt or fail outright,
        # so the user isn't left waiting for nothing to happen.
        schedule = ProcessSchedule()
        schedule.restart_mode = ProcessRestartMode.NEVER
        schedule.queueing_mode = ProcessQueueingMode.NEVER

        # Launch the process
        procdef_id = self._get_highcharts_procdef()
        pid = self.clients.process_dispatcher.schedule_process(
            process_definition_id=procdef_id,
            schedule=schedule,
            configuration=config)

        # Make sure it launched or raise an error

        process_gate = ProcessStateGate(
            self.clients.process_dispatcher.read_process, pid,
            ProcessStateEnum.RUNNING)
        # Note: 'await' is an ordinary method name here (Python 2 code)
        if not process_gate.await(self.CFG.get_safe(
                'endpoint.receive.timeout', 10)):
            raise ServiceUnavailable(
                "Failed to launch high charts realtime visualization")
Example #5
    def launch_benchmark(transform_number=1, primer=1, message_length=4):
        import gevent
        from gevent.greenlet import Greenlet
        from pyon.util.containers import DotDict
        from pyon.net.transport import NameTrio
        from pyon.net.endpoint import Publisher
        import numpy
        from pyon.ion.granule.record_dictionary import RecordDictionaryTool
        from pyon.ion.granule.taxonomy import TaxyTool
        from pyon.ion.granule.granule import build_granule

        tt = TaxyTool()
        tt.add_taxonomy_set('a')

        import uuid
        num = transform_number
        msg_len = message_length
        transforms = list()
        pids = 1
        TransformBenchTesting.message_length = message_length
        cc = Container.instance
        pub = Publisher(to_name=NameTrio(get_sys_name(),
                                         str(uuid.uuid4())[0:6]))
        for i in xrange(num):
            tbt = cc.proc_manager._create_service_instance(
                str(pids), 'tbt', 'prototype.transforms.linear',
                'TransformInPlaceNewGranule',
                DotDict({
                    'process': {
                        'name': 'tbt%d' % pids,
                        'transform_id': pids
                    }
                }))
            tbt.init()
            tbt.start()
            gevent.sleep(0.2)
            for _ in xrange(primer):
                rd = RecordDictionaryTool(tt, message_length)
                rd['a'] = numpy.arange(message_length)
                gran = build_granule(data_producer_id='dp_id',
                                     taxonomy=tt,
                                     record_dictionary=rd)
                pub.publish(gran)

            g = Greenlet(tbt.perf)
            g.start()
            transforms.append(tbt)
            pids += 1
Example #6
    def test_event_triggered_transform_B(self):
        '''
        Test that packets are processed by the event triggered transform
        '''

        #---------------------------------------------------------------------------------------------
        # Launch the event triggered transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='EventTriggeredTransform_B',
            description='For testing EventTriggeredTransform_B')
        process_definition.executable['module'] = (
            'ion.processes.data.transforms.event_triggered_transform')
        process_definition.executable['class'] = 'EventTriggeredTransform_B'
        event_transform_proc_def_id = self.process_dispatcher.create_process_definition(
            process_definition=process_definition)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)

        stream_def_id = self.pubsub.create_stream_definition(
            'stream_def', parameter_dictionary_id=pdict_id)
        stream_id, _ = self.pubsub.create_stream(
            'test_stream',
            exchange_point='science_data',
            stream_definition_id=stream_def_id)

        # Build the config
        config = DotDict()
        config.process.queue_name = self.exchange_name
        config.process.exchange_point = self.exchange_point
        config.process.publish_streams.output = stream_id
        config.process.event_type = 'ResourceLifecycleEvent'
        config.process.stream_id = stream_id

        # Schedule the process
        self.process_dispatcher.schedule_process(
            process_definition_id=event_transform_proc_def_id,
            configuration=config)

        #---------------------------------------------------------------------------------------------
        # Publish an event to wake up the event triggered transform
        #---------------------------------------------------------------------------------------------

        event_publisher = EventPublisher("ResourceLifecycleEvent")
        event_publisher.publish_event(origin='fake_origin')
Example #7
    def _lookup_associations(self, classname):
        """
        Returns dict of associations for given object type (not base types)
        """
        from pyon.util.config import Config
        from pyon.util.containers import DotDict
        if not self._associations:
            self._associations = DotDict()
            assoc_defs = Config(["res/config/associations.yml"]).data['AssociationDefinitions']
            self._associations.update((ad['predicate'], ad) for ad in assoc_defs)
        output = {}
        for key in self._associations:
            domain = str(self._associations[key]["domain"])
            range_ = str(self._associations[key]["range"])   # avoid shadowing builtin range
            if classname in domain or classname in range_:
                output[key] = self._associations[key]
        return output
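Both lookup variants stringify the domain and range lists before testing membership, so the check is a substring match against the list's repr rather than true list membership. A small illustration, with the YAML entry shape assumed from how the code reads it:

# Assumed shape of one AssociationDefinitions entry
entry = {"predicate": "hasStream", "domain": ["DataProduct"], "range": ["Stream"]}

# str(entry["domain"]) is "['DataProduct']", so the membership test is a substring check:
assert "DataProduct" in str(entry["domain"])
assert "Product" in str(entry["domain"])   # partial class names match too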
Example #8
    def start_app(self, appdef=None, config=None):
        """
        @brief Start an app from an app definition.
        Note: apps can come in one of 2 variants:
        1 processapp: In-line defined process to be started
        2 regular app: Full app definition
        """
        log.debug("AppManager.start_app(appdef=%s) ..." % appdef)

        appdef = DotDict(appdef)

        if 'config' in appdef:
            app_cfg = deepcopy(appdef.config)
            if config:
                dict_merge(app_cfg, config, inplace=True)
            config = app_cfg

        if 'processapp' in appdef:
            # Case 1: Appdef contains definition of process to start
            name, module, cls = appdef.processapp
            try:
                pid = self.container.spawn_process(name, module, cls, config)
                appdef._pid = pid
                self.apps.append(appdef)
            except Exception:
                log.exception("Appl %s start from processapp failed" %
                              appdef.name)
        else:
            # Case 2: Appdef contains full app start params
            modpath = appdef.mod
            try:
                mod = named_any(modpath)
                appdef._mod_loaded = mod

                # Start the app
                supid, state = mod.start(self.container, START_PERMANENT,
                                         appdef, config)
                appdef._supid = supid
                appdef._state = state

                log.debug("App '%s' started. Root sup-id=%s" %
                          (appdef.name, supid))

                self.apps.append(appdef)
            except Exception:
                log.exception("Appl %s start from appdef failed" % appdef.name)
Example #9
def get_obj_geospatial_point(doc, calculate=True):
    """Extracts a geospatial point (lat, lon, elev) from given object dict, by looking for an attribute with
    GeospatialIndex or GeospatialPoint or GeospatialLocation type or computing the center from a bounds
    """
    geo_center = None
    # TODO: Be more flexible about finding attributes with the right types
    if "location" in doc:
        geo_center = doc["location"]
    if "geospatial_point_center" in doc:
        geo_center = doc["geospatial_point_center"]
    if "details" in doc and type(
            doc["details"]) is dict and "location" in doc["details"]:
        geo_center = doc["details"]["location"]
    if not geo_center and calculate:
        # Try to calculate center point from bounds
        present, geo_bounds = get_obj_geospatial_bounds(doc,
                                                        calculate=False,
                                                        return_geo_bounds=True)
        if present:
            try:
                from ion.util.geo_utils import GeoUtils
                geo_bounds_obj = DotDict(**geo_bounds)
                geo_center = GeoUtils.calc_geospatial_point_center(
                    geo_bounds_obj)
            except Exception:
                log.exception("Could not calculate geospatial center point")
    if geo_center and isinstance(geo_center, dict):
        if "lat" in geo_center and "lon" in geo_center:
            lat, lon = geo_center.get("lat", 0), geo_center.get("lon", 0)
            if lat or lon:
                return True, (lat, lon, 0)
        elif "latitude" in geo_center and "longitude" in geo_center:
            lat, lon = geo_center.get("latitude",
                                      0), geo_center.get("longitude", 0)
            elev = geo_center.get("elevation", 0)
            if lat or lon or elev:
                return True, (lat, lon, elev)
        elif "geospatial_latitude" in geo_center and "geospatial_longitude" in geo_center:
            lat, lon = geo_center.get("geospatial_latitude",
                                      0), geo_center.get(
                                          "geospatial_longitude", 0)
            elev = geo_center.get("geospatial_vertical_location", 0)
            if lat or lon:
                return True, (lat, lon, elev)
    return False, (0, 0, 0)
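A quick usage sketch against the function above, showing the (found, (lat, lon, elev)) return contract with a hypothetical document:

doc = {"location": {"lat": 32.7153, "lon": 117.1564}}
found, (lat, lon, elev) = get_obj_geospatial_point(doc)
assert found and (lat, lon, elev) == (32.7153, 117.1564, 0)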
Example #10
    def test_persist_data_stream(self):
        config = IngestionConfiguration()

        self.ingestion_management.read_ingestion_configuration = Mock()
        self.ingestion_management.read_ingestion_configuration.return_value = config

        self.ingestion_management.is_persisted = Mock()
        self.ingestion_management.is_persisted.return_value = False

        self.ingestion_management.setup_queues = Mock()
        self.ingestion_management.setup_queues.return_value = True

        self.pubsub_read.return_value = DotDict(persisted=False)

        retval = self.ingestion_management.persist_data_stream(
            'stream_id', 'config_id', 'dataset_id')

        self.assertEquals(retval, 'dataset_id')
Example #11
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.client = DotDict()
        self.client.DAMS = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.client.DPMS = DataProductManagementServiceClient(
            node=self.container.node)
        self.client.IMS = InstrumentManagementServiceClient(
            node=self.container.node)
        self.client.OMS = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.client.PSMS = PubsubManagementServiceClient(
            node=self.container.node)

        self.client.RR = ResourceRegistryServiceClient(
            node=self.container.node)
Example #12
    def _create_calibration_coefficients_dict(self):
        config = DotDict()
        config.process.calibration_coeffs = {
            'temp_calibration_coeffs': {
                'TA0': 1.561342e-03,
                'TA1': 2.561486e-04,
                'TA2': 1.896537e-07,
                'TA3': 1.301189e-07,
                'TOFFSET': 0.000000e+00
            },
            'cond_calibration_coeffs': {
                'G': -9.896568e-01,
                'H': 1.316599e-01,
                'I': -2.213854e-04,
                'J': 3.292199e-05,
                'CPCOR': -9.570000e-08,
                'CTCOR': 3.250000e-06,
                'CSLOPE': 1.000000e+00
            },
            'pres_calibration_coeffs': {
                'PA0': 4.960417e-02,
                'PA1': 4.883682e-04,
                'PA2': -5.687309e-12,
                'PTCA0': 5.249802e+05,
                'PTCA1': 7.595719e+00,
                'PTCA2': -1.322776e-01,
                'PTCB0': 2.503125e+01,
                'PTCB1': 5.000000e-05,
                'PTCB2': 0.000000e+00,
                'PTEMPA0': -6.431504e+01,
                'PTEMPA1': 5.168177e+01,
                'PTEMPA2': -2.847757e-01,
                'POFFSET': 0.000000e+00
            }
        }
        return config
Example #13
    def on_initial_bootstrap(self, process, config, **kwargs):

        if os.environ.get('PYCC_MODE'):
            # This environment is an ion integration test
            log.info('PYCC_MODE: skipping qc_post_processor launch')
            return
        if self.process_exists(process, 'qc_post_processor'):
            # Short circuit the bootstrap to make sure not more than one is ever started
            return

        self.scheduler_service = SchedulerServiceProcessClient(process=process)
        self.process_dispatcher = ProcessDispatcherServiceProcessClient(
            process=process)
        self.run_interval = CFG.get_safe('service.qc_processing.run_interval',
                                         24)

        interval_key = uuid4().hex  # Unique identifier for this process

        config = DotDict()
        config.process.interval_key = interval_key

        process_definition = ProcessDefinition(
            name='qc_post_processor',
            executable={
                'module': 'ion.processes.data.transforms.qc_post_processing',
                'class': 'QCPostProcessing'
            })
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition)

        process_id = self.process_dispatcher.create_process(
            process_definition_id)
        self.process_dispatcher.schedule_process(process_definition_id,
                                                 process_id=process_id,
                                                 configuration=config)

        timer_id = self.scheduler_service.create_interval_timer(
            start_time=str(time.time()),
            end_time='-1',  # run forever
            interval=3600 * self.run_interval,
            event_origin=interval_key)
Example #14
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        # create missing data process definition
        self.dsmsclient = DataProcessManagementServiceClient(
            node=self.container.node)
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
            description="normally in preload",
            module='ion.processes.data.transforms.logical_transform',
            class_name='logical_transform')
        self.dsmsclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(
                    RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)

        self.addCleanup(killAllDataProcesses)
Example #15
    def setUp(self):
        # Start container by calling parent's setUp
        super(TestAssembly, self).setUp()

        # Now create client to DataProductManagementService
        self.client = DotDict()
        self.client.DAMS = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.client.DPMS = DataProductManagementServiceClient(
            node=self.container.node)
        self.client.IMS = InstrumentManagementServiceClient(
            node=self.container.node)
        self.client.OMS = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.client.PSMS = PubsubManagementServiceClient(
            node=self.container.node)
        self.client.DPRS = DataProcessManagementServiceClient(
            node=self.container.node)

        self.client.RR = ResourceRegistryServiceClient(
            node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
        self.dataset_management = DatasetManagementServiceClient()

        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
            description="normally in preload",
            module='ion.processes.data.transforms.logical_transform',
            class_name='logical_transform')

        self.client.DPRS.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.client.RR.find_resources(
                    RT.DataProcess, None, None, True)[0]:
                self.client.DPRS.deactivate_data_process(proc_id)
                self.client.DPRS.delete_data_process(proc_id)

        self.addCleanup(killAllDataProcesses)
Example #16
    def _find_lookup_tables(self, resource_id="", configuration=None):
        # Check if the resource has lookup tables attached

        configuration = configuration or DotDict()

        attachment_objs, _ = self.clients.resource_registry.find_objects(
            resource_id, PRED.hasAttachment, RT.Attachment, False)

        for attachment_obj in attachment_objs:

            words = set(attachment_obj.keywords)

            if 'DataProcessInput' in words:
                configuration[attachment_obj.name] = attachment_obj.content
                log.debug("Lookup table, %s, found in attachment %s" %
                          (attachment_obj.content, attachment_obj.name))
            else:
                log.debug("NO lookup table in attachment %s" %
                          attachment_obj.name)

        return configuration
Example #17
    def setUp(self):
        # Start container
        super(TestRSNIntegration, self).setUp()
        config = DotDict()

        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.catch_alert = gevent.queue.Queue()
Example #18
    def setUp(self):
        # Start container
        super(TestActivateRSNVel3DInstrument, self).setUp()
        config = DotDict()

        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
Example #19
    def start_rel(self, rel=None, config=None):
        """
        @brief Recurse over the rel and start apps defined there.
        Note: apps in a rel file can come in one of 2 forms:
        1 processapp: In-line defined process to be started as app
        2 app file: Reference to an app definition in an app file
        If the rel file provides an app config block, it is passed when spawning the process;
        any config dict given to this function is merged on top of it.
        """
        log.debug("AppManager.start_rel(rel=%s) ...", rel)

        if rel is None:
            rel = {}

        for rel_app_cfg in rel.apps:
            name = rel_app_cfg.name
            log.debug("app definition in rel: %s" % str(rel_app_cfg))

            if 'processapp' in rel_app_cfg:
                # Case 1: Rel contains definition of process to start as app
                name, module, cls = rel_app_cfg.processapp

                rel_cfg = None
                if 'config' in rel_app_cfg:
                    rel_cfg = rel_app_cfg.config.copy()
                    if config:
                        dict_merge(rel_cfg, config, inplace=True)

                self.container.spawn_process(name, module, cls, rel_cfg)
                self.apps.append(
                    DotDict(type="application",
                            name=name,
                            processapp=rel_app_cfg.processapp))

            else:
                # Case 2: Rel contains reference to app file to start
                app_file_path = 'res/apps/%s.yml' % (name)
                rel_cfg = rel_app_cfg.get('config', None)
                if config:
                    rel_cfg = rel_cfg or {}   # guard against merging into None
                    dict_merge(rel_cfg, config, inplace=True)
                self.start_app_from_url(app_file_path, config=rel_cfg)
Example #20
    def setUp(self):
        # Start container
        super(TestActivateInstrumentIntegration, self).setUp()
        config = DotDict()
        config.bootstrap.use_es = True

        self._start_container()
        self.addCleanup(TestActivateInstrumentIntegration.es_cleanup)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.usernotificationclient = UserNotificationServiceClient()

        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()
Example #21
    def create_data_process_logger(self, data_product_id, clone_id,
                                   argument_map):
        '''
        Launches a data process that just prints input
        '''
        out_name = argument_map.values()[0]

        # Make the transform function
        tf_obj = IonObject(
            RT.TransformFunction,
            name='stream_logger',
            description='',
            function='stream_logger',
            module='ion.services.sa.test.test_data_process_functions',
            arguments=['x'],
            function_type=TransformFunctionType.TRANSFORM)
        func_id = self.data_process_management.create_transform_function(
            tf_obj)
        self.addCleanup(self.data_process_management.delete_transform_function,
                        func_id)

        # Make the data process definition
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='stream_logger',
            description='logs some stream stuff',
            data_process_type=DataProcessTypeEnum.RETRIEVE_PROCESS)
        configuration = DotDict()
        configuration.publish_limit = 40
        dpd_id = self.data_process_management.create_data_process_definition(
            dpd_obj, func_id)
        data_process_id = self.data_process_management.create_data_process(
            data_process_definition_id=dpd_id,
            inputs=[data_product_id],
            outputs=[clone_id],
            configuration=configuration,
            argument_map=argument_map,
            out_param_name=out_name)
        return data_process_id
Example #22
    def test_presf_L0_splitter(self):
        '''
        Test that packets are processed by the presf_L0_splitter transform
        '''

        #---------------------------------------------------------------------------------------------
        # Launch the presf L0 splitter transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='Presf L0 Splitter',
            description='For testing Presf L0 Splitter')
        process_definition.executable['module'] = (
            'ion.processes.data.transforms.ctd.presf_L0_splitter')
        process_definition.executable['class'] = 'PresfL0Splitter'
        ctd_transform_proc_def_id = self.process_dispatcher.create_process_definition(
            process_definition=process_definition)

        # Build the config
        config = DotDict()
        config.process.queue_name = self.exchange_name
        config.process.exchange_point = self.exchange_point

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)

        stream_def_id = self.pubsub.create_stream_definition(
            'pres_stream_def', parameter_dictionary_id=pdict_id)
        pres_stream_id, _ = self.pubsub.create_stream(
            'test_pressure',
            stream_definition_id=stream_def_id,
            exchange_point='science_data')

        config.process.publish_streams.absolute_pressure = pres_stream_id

        # Schedule the process
        self.process_dispatcher.schedule_process(
            process_definition_id=ctd_transform_proc_def_id,
            configuration=config)
Example #23
    def test_set_status_computed_attributes(self):
        # set_status_computed_attributes(self, computed_attrs, values_dict=None, availability=None, reason=None)

        container = DotDict()

        log.debug("check null values")
        self.ASB.set_status_computed_attributes_notavailable(
            container, "this is the reason")

        for attr, enumval in reverse_mapping.iteritems():
            self.assertTrue(hasattr(container, attr))
            attrval = getattr(container, attr)
            self.assertIsInstance(attrval, ComputedIntValue)
            self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                             attrval.status)
            self.assertEqual("this is the reason", attrval.reason)

        log.debug("check provided values")

        statuses = {
            AggregateStatusType.AGGREGATE_COMMS: DeviceStatusType.STATUS_CRITICAL,
            AggregateStatusType.AGGREGATE_POWER: DeviceStatusType.STATUS_WARNING,
            AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_OK,
            AggregateStatusType.AGGREGATE_LOCATION: DeviceStatusType.STATUS_UNKNOWN,
        }

        self.ASB.set_status_computed_attributes(
            container, statuses, ComputedValueAvailability.PROVIDED, "na")

        for attr, enumval in reverse_mapping.iteritems():
            self.assertTrue(hasattr(container, attr))
            attrval = getattr(container, attr)
            self.assertIsInstance(attrval, ComputedIntValue)
            self.assertEqual(ComputedValueAvailability.PROVIDED,
                             attrval.status)
            self.assertEqual(statuses[enumval], attrval.value)
Example #24
    def _launch_transform(self,
                          name_of_transform='',
                          data_proc_def_id=None,
                          input_dpod_id=None,
                          output_dpod_id=None):

        # The key name must match the stream binding (e.g. "L2_stream"): when the data
        # process is launched, it goes into the config as
        # config.process.publish_streams.L2_stream.

        if name_of_transform in ['L0', 'L1']:
            binding = '%s_stream' % name_of_transform
        elif name_of_transform == 'L2_salinity':
            binding = 'salinity'
        elif name_of_transform == 'L2_density':
            binding = 'density'

        output_products = {binding: output_dpod_id}

        config = None
        if name_of_transform == 'L1':
            config = self._create_calibration_coefficients_dict()
        elif name_of_transform == 'L2_density':
            config = DotDict()
            config.process = {'lat': 32.7153, 'lon': 117.1564}

        data_proc_id = self.data_process_management.create_data_process(
            data_proc_def_id, [input_dpod_id], output_products, config)
        self.addCleanup(self.data_process_management.delete_data_process,
                        data_proc_id)

        self.data_process_management.activate_data_process(data_proc_id)
        self.addCleanup(self.data_process_management.deactivate_data_process,
                        data_proc_id)

        log.debug("Created a data process for ctdbp %s transform: id = %s",
                  name_of_transform, data_proc_id)

        return data_proc_id
Example #25
    def test_delete_transform(self):
        # mocks
        self.transform_service.read_transform = Mock()
        self.transform_service.read_transform.return_value = DotDict({'process_id': 'pid'})

        find_list = ['process_definition', 'subscription_id', 'stream_id']

        def finds(*args, **kwargs):
            return ([find_list.pop(0)], 'junk')

        self.mock_rr_find.side_effect = finds

        association_list = ['one', 'two', 'three']

        def associations(*args, **kwargs):
            return [association_list.pop(0)]

        self.mock_rr_assoc.side_effect = associations

        # execution
        ret = self.transform_service.delete_transform('mock_transform_id')

        # assertions
        self.transform_service.read_transform.assert_called_with(transform_id='mock_transform_id')
        self.mock_pd_cancel.assert_called_with('pid')
        self.assertEquals(self.mock_rr_find.call_count, 3)
        self.assertEquals(self.mock_rr_del_assoc.call_count, 3)
        self.assertEquals(self.mock_rr_delete.call_count, 1)
Example #26
    def _start_dependencies(self):
        """
        Starts the services declared in the class or instance variable "service_dependencies"
        """
        self.clients = DotDict()
        svc_deps = getattr(self, "service_dependencies", {})
        log.debug("Starting service dependencies. Number=%s" % len(svc_deps))
        if not svc_deps:
            return
        for svc in svc_deps:
            config = None
            if type(svc) in (tuple, list):
                config = svc[1]
                svc = svc[0]

            # Start the service
            self._start_service(svc, config=config)

            # Create a client
            clcls = service_registry.services[svc].simple_client
            self.clients[svc] = clcls(name=svc, node=self.container.node)

        log.debug("Service dependencies started")
Example #27
    def setup_transform(self):
        self.preload()
        queue_name = 'transform_prime'

        stream_info = self.setup_streams()
        in_stream_id, in_stream_def_id = stream_info[0]
        out_stream_id, out_stream_def_id = stream_info[1]

        routes = {}
        routes[(in_stream_id, out_stream_id)] = None

        config = DotDict()

        config.process.queue_name = queue_name
        config.process.routes = routes
        config.process.publish_streams = {out_stream_id: out_stream_id}

        sub_id = self.pubsub_management.create_subscription(
            queue_name, stream_ids=[in_stream_id])
        self.addCleanup(self.pubsub_management.delete_subscription, sub_id)
        self.pubsub_management.activate_subscription(sub_id)
        self.addCleanup(self.pubsub_management.deactivate_subscription, sub_id)

        self.container.spawn_process(
            'transform_prime', 'ion.processes.data.transforms.transform_prime',
            'TransformPrime', config)

        listen_sub_id = self.pubsub_management.create_subscription(
            'listener', stream_ids=[out_stream_id])
        self.addCleanup(self.pubsub_management.delete_subscription,
                        listen_sub_id)

        self.pubsub_management.activate_subscription(listen_sub_id)
        self.addCleanup(self.pubsub_management.deactivate_subscription,
                        listen_sub_id)
        return [(in_stream_id, in_stream_def_id),
                (out_stream_id, out_stream_def_id)]
Example #28
File: apps.py Project: ooici-dm/pyon
    def start_rel(self, rel=None):
        """
        @brief Recurse over the rel and start apps defined there.
        Note: apps in a rel file can come in one of 2 forms:
        1 processapp: In-line defined process to be started as app
        2 app file: Reference to an app definition in an app file
        """
        log.debug("AppManager.start_rel(rel=%s) ..." % str(rel))

        if rel is None:
            rel = {}

        for rel_app_cfg in rel.apps:
            name = rel_app_cfg.name
            log.debug("app definition in rel: %s" % str(rel_app_cfg))

            if 'processapp' in rel_app_cfg:
                # Case 1: Rel contains definition of process to start as app
                name, module, cls = rel_app_cfg.processapp

                if 'config' in rel_app_cfg:
                    # Nest dict modifier and apply config from rel file
                    config = DictModifier(CFG, rel_app_cfg.config)
                else:
                    config = DictModifier(CFG)

                self.container.spawn_process(name, module, cls, config)
                self.apps.append(
                    DotDict(type="application",
                            name=name,
                            processapp=rel_app_cfg.processapp))

            else:
                # Case 2: Rel contains reference to app file to start
                app_file_path = 'res/apps/%s.yml' % (name)
                self.start_app_from_url(app_file_path,
                                        config=rel_app_cfg.get('config', None))
Example #29
    def simplify_assn_resource_ids(self, assn_list):
        count = 0

        lookup = {}

        retval = []

        for a in assn_list:
            if a.s not in lookup:
                lookup[a.s] = count
                count += 1
            if a.o not in lookup:
                lookup[a.o] = count
                count += 1
            retval.append(
                DotDict({
                    "s": lookup[a.s],
                    "st": a.st,
                    "p": a.p,
                    "o": lookup[a.o],
                    "ot": a.ot
                }))

        return retval
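A usage sketch: the helper replaces subject/object resource ids with small integers assigned in order of first appearance, leaving the type and predicate fields untouched (ids hypothetical):

from pyon.util.containers import DotDict

assns = [
    DotDict(s="res_A", st="Instrument", p="hasStream", o="res_B", ot="Stream"),
    DotDict(s="res_B", st="Stream", p="hasDataset", o="res_C", ot="Dataset"),
]
simplified = svc.simplify_assn_resource_ids(assns)   # svc: whatever object exposes the helper
# First association maps res_A -> 0, res_B -> 1; the second reuses 1 and adds res_C -> 2.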
Example #30
    def setUp(self):
        # Start container by calling parent's setUp
        super(TestAssembly, self).setUp()

        # Now create client to DataProductManagementService
        self.client = DotDict()
        self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
        self.client.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.client.OMS  = ObservatoryManagementServiceClient(node=self.container.node)
        self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
        self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node)

        self.client.RR   = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
        self.dataset_management = DatasetManagementServiceClient()


        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.client.RR.find_resources(RT.DataProcess, None, None, True)[0]:
                self.client.DPRS.deactivate_data_process(proc_id)
                self.client.DPRS.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)