Code example #1
    def __init__(self, param_dictionary=None, stream_definition_id='', locator=None):
        """
        Construct the tool from a serialized ParameterDictionary (dict), a
        ParameterDictionary instance, or a stream_definition_id.
        """
        if type(param_dictionary) == dict:
            self._pdict = ParameterDictionary.load(param_dictionary)
        
        elif isinstance(param_dictionary,ParameterDictionary):
            self._pdict = param_dictionary
        
        elif stream_definition_id:
            pdict = RecordDictionaryTool.pdict_from_stream_def(stream_definition_id)
            self._pdict = ParameterDictionary.load(pdict)
            self._stream_def = stream_definition_id
        
        else:
            raise BadRequest('Unable to create record dictionary with improper ParameterDictionary')
        
        if stream_definition_id:
            self._stream_def=stream_definition_id
        
        self._shp = None
        self._rd = {}
        self._locator = locator

        self._setup_params()
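
Taken together, the branches above mean the tool can be built from a serialized parameter-dictionary dict, from a ParameterDictionary instance, or from a stream definition id. The round trip below is a minimal sketch pieced together from examples #14 and #17 further down this page, not code from any of the listed projects; in particular, the import paths are assumptions and may differ between coverage-model / coi-services versions.

# Sketch only: the import paths below are assumed, not taken from the excerpts on this page.
import numpy as np
from coverage_model.parameter import ParameterContext, ParameterDictionary
from coverage_model.parameter_types import QuantityType
from ion.services.dm.utility.granule.record_dictionary import RecordDictionaryTool

pdict = ParameterDictionary()
time_ctxt = ParameterContext(name='time', param_type=QuantityType(value_encoding=np.float64))
pdict.add_context(time_ctxt, is_temporal=True)

rdt = RecordDictionaryTool(param_dictionary=pdict)  # second branch of __init__ above
rdt['time'] = np.arange(10)

granule = rdt.to_granule()
rdt2 = RecordDictionaryTool.load_from_granule(granule)  # see examples #18 and #19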
Code example #2
File: test_types.py (Project: mbarry02/coi-services)
    def cov_io(self, context, value_array, comp_val=None):
        pdict = ParameterDictionary()
        time = ParameterContext(name='time', param_type=QuantityType(value_encoding=np.float64))
        pdict.add_context(context)
        pdict.add_context(time, True)
        # Construct temporal and spatial Coordinate Reference System objects
        tcrs = CRS([AxisTypeEnum.TIME])
        scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT])

        # Construct temporal and spatial Domain objects
        tdom = GridDomain(GridShape('temporal', [0]), tcrs, MutabilityEnum.EXTENSIBLE) # 1d (timeline)
        sdom = GridDomain(GridShape('spatial', [0]), scrs, MutabilityEnum.IMMUTABLE) # 0d spatial topology (station/trajectory)

        # Instantiate the SimplexCoverage providing the ParameterDictionary, spatial Domain and temporal Domain
        cov = SimplexCoverage('test_data', create_guid(), 'sample coverage_model', parameter_dictionary=pdict, temporal_domain=tdom, spatial_domain=sdom)

        cov.insert_timesteps(len(value_array))
        cov.set_parameter_values('test', tdoa=slice(0,len(value_array)), value=value_array)
        comp_val = comp_val if comp_val is not None else value_array
        testval = cov.get_parameter_values('test')
        try:
            np.testing.assert_array_equal(testval, comp_val)
        except:
            print repr(value_array)
            raise
Code example #3
    def _new_coverage(self, root_dir, persistence_guid, name, reference_coverage_locs, parameter_dictionary, complex_type, reference_coverage_extents={}):
        reference_coverage_locs = reference_coverage_locs or [] # Can be empty
        # Coverage doesn't exist, make a new one
        if name is None:
            raise SystemError('\'reference_coverages\' and \'name\' cannot be None')
        if not isinstance(name, basestring):
            raise TypeError('\'name\' must be of type basestring')
        self.name = name
        if parameter_dictionary is None:
            parameter_dictionary = ParameterDictionary()

        # Must be in 'a' for a new coverage
        self.mode = 'a'

        self._reference_covs = collections.OrderedDict()

        if not hasattr(reference_coverage_locs, '__iter__'):
            reference_coverage_locs = [reference_coverage_locs]

        self._persistence_layer = PostgresPersistenceLayer(root_dir,
                                                           persistence_guid,
                                                           name=self.name,
                                                           mode=self.mode,
                                                           param_dict=parameter_dictionary,
                                                           rcov_locs=reference_coverage_locs,
                                                           rcov_extents=reference_coverage_extents,
                                                           complex_type=complex_type,
                                                           coverage_type='complex',
                                                           version=self.version)

        for pc in parameter_dictionary.itervalues():
            self.append_parameter(pc[1])
Code example #4
def get_param_dict(param_dict_name = None):
    raise NotImplementedError('This method has been replaced by DatasetManagementService, please use read_parameter_dictionary_by_name instead')
    # read the file just once, not every time needed
    global _PARAMETER_DICTIONARIES
    global _PARAMETER_CONTEXTS
    if not _PARAMETER_DICTIONARIES:
        param_dict_defs_file = "res/config/param_dict_defs.yml"
        with open(param_dict_defs_file, "r") as f_dict:
            dict_string = f_dict.read()
        _PARAMETER_DICTIONARIES = yaml.load(dict_string)

        param_context_defs_file = "res/config/param_context_defs.yml"
        with open(param_context_defs_file, "r") as f_ctxt:
            ctxt_string = f_ctxt.read()
        _PARAMETER_CONTEXTS = yaml.load(ctxt_string)

    # make sure we have the one requested
    context_names = _PARAMETER_DICTIONARIES[param_dict_name]
    for name in context_names:
        if not _PARAMETER_CONTEXTS.has_key(name):
            raise AssertionError('The parameter dict has a context that does not exist in the parameter context defs specified in yml: %s' % name)

    # package and ship
    pdict = ParameterDictionary()
    for ctxt_name in context_names:
        param_context = ParameterContext.load(_PARAMETER_CONTEXTS[ctxt_name])
        pdict.add_context(param_context)
    return pdict
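
The loader above indexes two YAML files by name: param_dict_defs.yml maps a dictionary name to a list of context names, and param_context_defs.yml maps each context name to a serialized ParameterContext. A hypothetical sketch of the decoded shapes, inferred only from how they are indexed here (the names and payloads are placeholders, not real definitions):

# Hypothetical shapes only; keys and context payloads are placeholders.
_PARAMETER_DICTIONARIES = {
    'sample_param_dict': ['time', 'temp'],  # dictionary name -> list of parameter context names
}
_PARAMETER_CONTEXTS = {
    'time': {'name': 'time'},  # a serialized ParameterContext, consumed by ParameterContext.load()
    'temp': {'name': 'temp'},
}

for name in _PARAMETER_DICTIONARIES['sample_param_dict']:
    assert name in _PARAMETER_CONTEXTS  # mirrors the has_key() check above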
Code example #5
def get_param_dict(param_dict_name=None):
    raise NotImplementedError(
        'This method has been replaced by DatasetManagementService, please use read_parameter_dictionary_by_name instead'
    )
    # read the file just once, not every time needed
    global _PARAMETER_DICTIONARIES
    global _PARAMETER_CONTEXTS
    if not _PARAMETER_DICTIONARIES:
        param_dict_defs_file = "res/config/param_dict_defs.yml"
        with open(param_dict_defs_file, "r") as f_dict:
            dict_string = f_dict.read()
        _PARAMETER_DICTIONARIES = yaml.load(dict_string)

        param_context_defs_file = "res/config/param_context_defs.yml"
        with open(param_context_defs_file, "r") as f_ctxt:
            ctxt_string = f_ctxt.read()
        _PARAMETER_CONTEXTS = yaml.load(ctxt_string)

    # make sure we have the one requested
    context_names = _PARAMETER_DICTIONARIES[param_dict_name]
    for name in context_names:
        if not _PARAMETER_CONTEXTS.has_key(name):
            raise AssertionError(
                'The parameter dict has a context that does not exist in the parameter context defs specified in yml: %s'
                % name)

    # package and ship
    pdict = ParameterDictionary()
    for ctxt_name in context_names:
        param_context = ParameterContext.load(_PARAMETER_CONTEXTS[ctxt_name])
        pdict.add_context(param_context)
    return pdict
Code example #6
File: granule_utils.py (Project: Bobfrat/coi-services)
    def sync_rdt_with_coverage(self, coverage=None, tdoa=None, start_time=None, end_time=None, stride_time=None, parameters=None):
        '''
        Builds a granule based on the coverage
        '''
        if coverage is None:
            coverage = self.coverage

        slice_ = slice(None) # Defaults to all values
        if tdoa is not None and isinstance(tdoa,slice):
            slice_ = tdoa

        elif stride_time is not None:
            validate_is_instance(start_time, Number, 'start_time must be a number for striding.')
            validate_is_instance(end_time, Number, 'end_time must be a number for striding.')
            validate_is_instance(stride_time, Number, 'stride_time must be a number for striding.')
            ugly_range = np.arange(start_time, end_time, stride_time)
            idx_values = [TimeUtils.get_relative_time(coverage,i) for i in ugly_range]
            slice_ = [idx_values]

        elif not (start_time is None and end_time is None):
            time_var = coverage._temporal_param_name
            uom = coverage.get_parameter_context(time_var).uom
            if start_time is not None:
                start_units = TimeUtils.ts_to_units(uom,start_time)
                log.info('Units: %s', start_units)
                start_idx = TimeUtils.get_relative_time(coverage,start_units)
                log.info('Start Index: %s', start_idx)
                start_time = start_idx
            if end_time is not None:
                end_units   = TimeUtils.ts_to_units(uom,end_time)
                log.info('End units: %s', end_units)
                end_idx   = TimeUtils.get_relative_time(coverage,end_units)
                log.info('End index: %s',  end_idx)
                end_time = end_idx
            slice_ = slice(start_time,end_time,stride_time)
            log.info('Slice: %s', slice_)

        if parameters is not None:
            pdict = ParameterDictionary()
            params = set(coverage.list_parameters()).intersection(parameters)
            for param in params:
                pdict.add_context(coverage.get_parameter_context(param))
            rdt = RecordDictionaryTool(param_dictionary=pdict)
            self.pdict = pdict
        else:
            rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)
        
        fields = coverage.list_parameters()
        if parameters is not None:
            fields = set(fields).intersection(parameters)

        for d in fields:
            rdt[d] = coverage.get_parameter_values(d,tdoa=slice_)
        self.rdt = rdt # Sync
Code example #7
    def _create_parameter_dictionary(self):
        pdict = ParameterDictionary()

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        return pdict
Code example #8
    def _create_parameter(self):

        pdict = ParameterDictionary()

        pdict = self._add_location_time_ctxt(pdict)

        pres_ctxt = ParameterContext(
            'pressure', param_type=QuantityType(value_encoding=numpy.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext(
            'temp', param_type=QuantityType(value_encoding=numpy.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext(
            'conductivity',
            param_type=QuantityType(value_encoding=numpy.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        return pdict
Code example #9
    def _get_data(cls, config):
        new_flst = get_safe(config, 'constraints.new_files', [])
        hdr_cnt = get_safe(config, 'header_count', SlocumParser.DEFAULT_HEADER_SIZE)
        for f in new_flst:
            try:
                parser = SlocumParser(f[0], hdr_cnt)
                #CBM: Not in use yet...
    #            ext_dset_res = get_safe(config, 'external_dataset_res', None)
    #            t_vname = ext_dset_res.dataset_description.parameters['temporal_dimension']
    #            x_vname = ext_dset_res.dataset_description.parameters['zonal_dimension']
    #            y_vname = ext_dset_res.dataset_description.parameters['meridional_dimension']
    #            z_vname = ext_dset_res.dataset_description.parameters['vertical_dimension']
    #            var_lst = ext_dset_res.dataset_description.parameters['variables']

                max_rec = get_safe(config, 'max_records', 1)
                dprod_id = get_safe(config, 'data_producer_id', 'unknown data producer')
                #tx_yml = get_safe(config, 'taxonomy')
                #ttool = TaxyTool.load(tx_yml) #CBM: Assertion inside RDT.__setitem__ requires same instance of TaxyTool
                pdict = ParameterDictionary.load(get_safe(config, 'param_dictionary'))

                cnt = calculate_iteration_count(len(parser.sensor_map), max_rec)
                for x in xrange(cnt):
                    #rdt = RecordDictionaryTool(taxonomy=ttool)
                    rdt = RecordDictionaryTool(param_dictionary=pdict)

                    for name in parser.sensor_map:
                        d = parser.data_map[name][x*max_rec:(x+1)*max_rec]
                        rdt[name]=d

                    #g = build_granule(data_producer_id=dprod_id, taxonomy=ttool, record_dictionary=rdt)
                    g = build_granule(data_producer_id=dprod_id, record_dictionary=rdt, param_dictionary=pdict)
                    yield g
            except SlocumParseException as spe:
                # TODO: Decide what to do here, raise an exception or carry on
                log.error('Error parsing data file: \'{0}\''.format(f))
Code example #10
    def _construct_stream_and_publisher(self, stream_name, stream_config):

        if log.isEnabledFor(logging.TRACE):  # pragma: no cover
            log.trace("%r: _construct_stream_and_publisher: "
                      "stream_name:%r, stream_config:\n%s",
                      self._platform_id, stream_name,
                      self._pp.pformat(stream_config))

        decoder = IonObjectDeserializer(obj_registry=get_obj_registry())

        if 'stream_def_dict' not in stream_config:
            # should not happen: PlatformAgent._validate_configuration validates this.
            log.error("'stream_def_dict' key not in configuration for stream %r" % stream_name)
            return

        stream_def_dict = stream_config['stream_def_dict']
        stream_def_dict['type_'] = 'StreamDefinition'
        stream_def_obj = decoder.deserialize(stream_def_dict)
        self._stream_defs[stream_name] = stream_def_obj

        routing_key           = stream_config['routing_key']
        stream_id             = stream_config['stream_id']
        exchange_point        = stream_config['exchange_point']
        parameter_dictionary  = stream_def_dict['parameter_dictionary']
        log.debug("%r: got parameter_dictionary from stream_def_dict", self._platform_id)

        self._data_streams[stream_name] = stream_id
        self._param_dicts[stream_name] = ParameterDictionary.load(parameter_dictionary)
        stream_route = StreamRoute(exchange_point=exchange_point, routing_key=routing_key)
        publisher = self._create_publisher(stream_id, stream_route)
        self._data_publishers[stream_name] = publisher

        log.debug("%r: created publisher for stream_name=%r", self._platform_id, stream_name)
Code example #11
File: coverage.py (Project: tgiguere/coverage-model)
            def _doload(self):
                # Make sure the coverage directory exists
                if not os.path.exists(pth):
                    raise SystemError('Cannot find specified coverage: {0}'.format(pth))

                # All appears well - load it up!
                self._persistence_layer = PersistenceLayer(root_dir, persistence_guid, mode=self.mode)

                self.name = self._persistence_layer.name
                self.spatial_domain = self._persistence_layer.sdom
                self.temporal_domain = self._persistence_layer.tdom

                self._range_dictionary = ParameterDictionary()
                self._range_value = RangeValues()

                self._bricking_scheme = self._persistence_layer.global_bricking_scheme

                self._in_memory_storage = False

                auto_flush_values = self._persistence_layer.auto_flush_values
                inline_data_writes = self._persistence_layer.inline_data_writes

                from coverage_model.persistence import PersistedStorage
                for parameter_name in self._persistence_layer.parameter_metadata.keys():
                    md = self._persistence_layer.parameter_metadata[parameter_name]
                    pc = md.parameter_context
                    self._range_dictionary.add_context(pc)
                    s = PersistedStorage(md, self._persistence_layer.brick_dispatcher, dtype=pc.param_type.storage_encoding, fill_value=pc.param_type.fill_value, mode=self.mode, inline_data_writes=inline_data_writes, auto_flush=auto_flush_values)
                    self._range_value[parameter_name] = get_value_class(param_type=pc.param_type, domain_set=pc.dom, storage=s)
Code example #12
    def _construct_stream_and_publisher(self, stream_name, stream_config):

        if log.isEnabledFor(logging.TRACE):  # pragma: no cover
            log.trace("%r: _construct_stream_and_publisher: "
                      "stream_name:%r, stream_config:\n%s",
                      self._platform_id, stream_name,
                      self._pp.pformat(stream_config))

        decoder = IonObjectDeserializer(obj_registry=get_obj_registry())

        if 'stream_def_dict' not in stream_config:
            # should not happen: PlatformAgent._validate_configuration validates this.
            log.error("'stream_def_dict' key not in configuration for stream %r" % stream_name)
            return

        stream_def_dict = stream_config['stream_def_dict']
        stream_def_dict['type_'] = 'StreamDefinition'
        stream_def_obj = decoder.deserialize(stream_def_dict)
        self._stream_defs[stream_name] = stream_def_obj

        routing_key           = stream_config['routing_key']
        stream_id             = stream_config['stream_id']
        exchange_point        = stream_config['exchange_point']
        parameter_dictionary  = stream_def_dict['parameter_dictionary']
        log.debug("%r: got parameter_dictionary from stream_def_dict", self._platform_id)

        self._data_streams[stream_name] = stream_id
        self._param_dicts[stream_name] = ParameterDictionary.load(parameter_dictionary)
        stream_route = StreamRoute(exchange_point=exchange_point, routing_key=routing_key)
        publisher = self._create_publisher(stream_id, stream_route)
        self._data_publishers[stream_name] = publisher

        log.debug("%r: created publisher for stream_name=%r", self._platform_id, stream_name)
Code example #13
    def sync_rdt_with_coverage(self, coverage=None, tdoa=None, start_time=None, end_time=None, parameters=None):
        '''
        Builds a granule based on the coverage
        '''
        if coverage is None:
            coverage = self.coverage

        slice_ = slice(None) # Defaults to all values
        if tdoa is not None and isinstance(tdoa,slice):
            slice_ = tdoa

        elif not (start_time is None and end_time is None):
            uom = coverage.get_parameter_context('time').uom
            if start_time is not None:
                start_units = self.ts_to_units(uom,start_time)
                log.info('Units: %s', start_units)
                start_idx = self.get_relative_time(coverage,start_units)
                log.info('Start Index: %s', start_idx)
                start_time = start_idx
            if end_time is not None:
                end_units   = self.ts_to_units(uom,end_time)
                log.info('End units: %s', end_units)
                end_idx   = self.get_relative_time(coverage,end_units)
                log.info('End index: %s',  end_idx)
                end_time = end_idx
            slice_ = slice(start_time,end_time)
            log.info('Slice: %s', slice_)

        if parameters is not None:
            pdict = ParameterDictionary()
            params = set(coverage.list_parameters()).intersection(parameters)
            for param in params:
                pdict.add_context(coverage.get_parameter_context(param))
            rdt = RecordDictionaryTool(param_dictionary=pdict)
            self.pdict = pdict
        else:
            rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)
        
        fields = coverage.list_parameters()
        if parameters is not None:
            fields = set(fields).intersection(parameters)

        for d in fields:
            rdt[d] = coverage.get_parameter_values(d,tdoa=slice_)
        self.rdt = rdt # Sync
Code example #14
    def rdt_to_granule(self, context, value_array, comp_val=None):

        pdict = ParameterDictionary()
        pdict.add_context(context)

        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt["test"] = value_array

        granule = rdt.to_granule()
        rdt2 = RecordDictionaryTool.load_from_granule(granule)

        testval = comp_val if comp_val is not None else value_array
        actual = rdt2["test"]

        if isinstance(testval, basestring):
            self.assertEquals(testval, actual)
        else:
            np.testing.assert_array_equal(testval, actual)
Code example #15
File: test_types.py (Project: sfoley/coi-services)
    def rdt_to_granule(self, context, value_array, comp_val=None):

        pdict = ParameterDictionary()
        pdict.add_context(context)

        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt['test'] = value_array

        granule = rdt.to_granule()
        rdt2 = RecordDictionaryTool.load_from_granule(granule)

        testval = comp_val if comp_val is not None else value_array
        actual = rdt2['test']

        if isinstance(testval, basestring):
            self.assertEquals(testval, actual)
        else:
            np.testing.assert_array_equal(testval, actual)
Code example #16
    def _setup_resources(self):
        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        stream_id, stream_route, stream_def = self.create_stream_and_logger(name='fibonacci_stream', pdict=pdict)
        #        tx = TaxyTool()
        #        tx.add_taxonomy_set('data', 'external_data')

        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': stream_def,
            'data_producer_id': 'fibonacci_data_producer_id',
            'max_records': 4,
            }
Code example #17
File: test_types.py (Project: ednad/coi-services)
    def rdt_to_granule(self, context, value_array, comp_val=None):
        time = ParameterContext(name='time', param_type=QuantityType(value_encoding=np.float64))
        
        pdict = ParameterDictionary()
        pdict.add_context(time, is_temporal=True)
        pdict.add_context(context)

        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt['time'] = np.arange(len(value_array))
        rdt['test'] = value_array

        granule = rdt.to_granule()
        rdt2 = RecordDictionaryTool.load_from_granule(granule)

        testval = comp_val if comp_val is not None else value_array
        actual = rdt2['test']

        if isinstance(testval, basestring):
            self.assertEquals(testval, actual)
        else:
            np.testing.assert_array_equal(testval, actual)
Code example #18
    def load_from_granule(cls, g):
        """
        @brief return an instance of Record Dictionary Tool from a granule. Used when a granule is received in a message
        """
        if g.param_dictionary:
            result = cls(param_dictionary=ParameterDictionary.load(g.param_dictionary))

        else:
            result = cls(TaxyTool(g.taxonomy))

        result._rd = g.record_dictionary
        if result._rd.has_key(0):
            result._shp = result._rd[0].shape
        return result
Code example #19
    def load_from_granule(cls, g):
        if isinstance(g.param_dictionary, str):
            instance = cls(stream_definition_id=g.param_dictionary, locator=g.locator)
            pdict = RecordDictionaryTool.pdict_from_stream_def(g.param_dictionary)
            instance._pdict = ParameterDictionary.load(pdict)
        
        else:
            instance = cls(param_dictionary=g.param_dictionary, locator=g.locator)
            instance._pdict = ParameterDictionary.load(g.param_dictionary)
        
       
        if g.domain:
            instance._shp = (g.domain[0],)
        
        for k,v in g.record_dictionary.iteritems():
            if v is not None:
                ptype = instance._pdict.get_context(k).param_type
                paramval = get_value_class(ptype, domain_set = instance.domain)
                paramval[:] = v
                paramval.storage._storage.flags.writeable = False

                instance._rd[k] = paramval
        
        return instance
Code example #20
    def defining_parameter_dictionary(self):

        # Define the parameter context objects

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0

        height_ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=np.float32))
        height_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        height_ctxt.uom = 'meters'
        height_ctxt.fill_value = 0e0

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0


        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0

        # Define the parameter dictionary objects

        self.temp = ParameterDictionary()
        self.temp.add_context(t_ctxt)
        self.temp.add_context(lat_ctxt)
        self.temp.add_context(lon_ctxt)
        self.temp.add_context(height_ctxt)
        self.temp.add_context(temp_ctxt)
        self.temp.add_context(data_ctxt)
Code example #21
    def setUp(self):

        self._tx = TaxyTool()
        self._tx.add_taxonomy_set('temp', 'long_temp_name')
        self._tx.add_taxonomy_set('cond', 'long_cond_name')
        self._tx.add_taxonomy_set('pres', 'long_pres_name')
        self._tx.add_taxonomy_set('rdt')
        self._tx.add_taxonomy_set('rdt2')
        # map is {<local name>: <granule name or path>}

        self._rdt = RecordDictionaryTool(taxonomy=self._tx)

        self._pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        self._pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        self._pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        self._pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        self._pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        self._pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pres', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        self._pdict.add_context(pres_ctxt)

        self._rdt_pdict = RecordDictionaryTool(param_dictionary=self._pdict)
Code example #22
    def _get_data(cls, config):
        """
        Generates a random sample array of length config['constraints']['array_len'] and
        yields it as granules of at most config['max_records'] records each
        @param config Dict of configuration parameters - must contain ['constraints']['array_len']
        """
        array_len = get_safe(config, 'constraints.array_len',1)

        max_rec = get_safe(config, 'max_records', 1)
        dprod_id = get_safe(config, 'data_producer_id')
        #tx_yml = get_safe(config, 'taxonomy')
        #ttool = TaxyTool.load(tx_yml)
        pdict = ParameterDictionary.load(get_safe(config, 'param_dictionary'))

        arr = npr.random_sample(array_len)
        log.debug('Array to send using max_rec={0}: {1}'.format(max_rec, arr))
        cnt = calculate_iteration_count(arr.size, max_rec)
        for x in xrange(cnt):
            rdt = RecordDictionaryTool(param_dictionary=pdict)
            d = arr[x*max_rec:(x+1)*max_rec]
            rdt['dummy'] = d
            g = rdt.to_granule()
            yield g
Code example #23
    def _get_data(cls, config):
        """
        A generator that retrieves config['constraints']['count'] number of sequential Fibonacci numbers
        @param config Dict of configuration parameters - must contain ['constraints']['count']
        """
        cnt = get_safe(config,'constraints.count',1)

        max_rec = get_safe(config, 'max_records', 1)
        dprod_id = get_safe(config, 'data_producer_id')
        #tx_yml = get_safe(config, 'taxonomy')
        #ttool = TaxyTool.load(tx_yml)
        pdict = ParameterDictionary.load(get_safe(config, 'param_dictionary'))

        def fibGenerator():
            """
            A Fibonacci sequence generator
            """
            count = 0
            ret = []
            a, b = 1, 1
            while 1:
                count += 1
                ret.append(a)
                if count == max_rec:
                    yield np.array(ret)
                    ret=[]
                    count = 0

                a, b = b, a + b

        gen=fibGenerator()
        cnt = calculate_iteration_count(cnt, max_rec)
        for i in xrange(cnt):
            rdt = RecordDictionaryTool(param_dictionary=pdict)
            d = gen.next()
            rdt['data'] = d
            g = rdt.to_granule()
            yield g
Code example #24
    def _do_build(self):
        # Reset things to ensure we don't munge everything
        self._reference_covs = collections.OrderedDict()
        self._range_dictionary = ParameterDictionary()
        self._range_value = RangeValues()
        self._reference_covs = self._build_ordered_coverage_dict()

        for parameter_name in self._persistence_layer.parameter_metadata:
            md = self._persistence_layer.parameter_metadata[parameter_name]
            mm = self._persistence_layer.master_manager
            pc = md.parameter_context

            # Assign the coverage's domain object(s)
            self._assign_domain(pc)

            # Get the callbacks for ParameterFunctionType parameters
            if hasattr(pc, '_pval_callback'):
                pc._pval_callback = self.get_parameter_values
                pc._pctxt_callback = self.get_parameter_context
            self._range_dictionary.add_context(pc)
            s = PostgresPersistedStorage(md, metadata_manager=mm, parameter_context=pc, dtype=pc.param_type.storage_encoding, fill_value=pc.param_type.fill_value, mode=self._persistence_layer.mode)
            self._persistence_layer.value_list[parameter_name] = s
            self._range_value[parameter_name] = get_value_class(param_type=pc.param_type, domain_set=pc.dom, storage=s)
Code example #25
    def _construct_stream_and_publisher(self, stream_name, stream_config):

        # granule_publish_rate
        # records_per_granule

        if log.isEnabledFor(logging.TRACE):  # pragma: no cover
            log.trace("%r: _construct_stream_and_publisher: "
                      "stream_name:%r, stream_config:\n%s",
                      self._platform_id, stream_name,
                      self._pp.pformat(stream_config))

        decoder = IonObjectDeserializer(obj_registry=get_obj_registry())

        if 'stream_def_dict' in stream_config:
            stream_def_dict = stream_config['stream_def_dict']
            stream_def_dict['type_'] = 'StreamDefinition'
            stream_def_obj = decoder.deserialize(stream_def_dict)
            self._stream_defs[stream_name] = stream_def_obj
            log.debug("%r: using stream_def_dict", self._platform_id)

        else:  # TODO this case to be removed.
            stream_definition_ref = stream_config['stream_definition_ref']
            self._stream_defs[stream_name] = stream_definition_ref
            log.debug("%r: using stream_definition_ref", self._platform_id)

        routing_key           = stream_config['routing_key']
        stream_id             = stream_config['stream_id']
        exchange_point        = stream_config['exchange_point']
        parameter_dictionary  = stream_config['parameter_dictionary']

        self._data_streams[stream_name] = stream_id
        self._param_dicts[stream_name] = ParameterDictionary.load(parameter_dictionary)
        stream_route = StreamRoute(exchange_point=exchange_point, routing_key=routing_key)
        publisher = self._create_publisher(stream_id, stream_route)
        self._data_publishers[stream_name] = publisher

        log.debug("%r: created publisher for stream_name=%r", self._platform_id, stream_name)
Code example #26
    def _create_parameter(self):

        pdict = ParameterDictionary()

        pdict = self._add_location_time_ctxt(pdict)

        pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=numpy.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        return pdict
Code example #27
    def rdt_to_granule(self, context, value_array, comp_val=None):
        time = ParameterContext(
            name='time', param_type=QuantityType(value_encoding=np.float64))

        pdict = ParameterDictionary()
        pdict.add_context(time, is_temporal=True)
        pdict.add_context(context)

        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt['time'] = np.arange(len(value_array))
        rdt['test'] = value_array

        granule = rdt.to_granule()
        rdt2 = RecordDictionaryTool.load_from_granule(granule)

        testval = comp_val if comp_val is not None else value_array
        actual = rdt2['test']

        if isinstance(testval, basestring):
            self.assertEquals(testval, actual)
        else:
            np.testing.assert_array_equal(testval, actual)
Code example #28
File: test_types.py (Project: sfoley/coi-services)
    def cov_io(self, context, value_array, comp_val=None):
        pdict = ParameterDictionary()
        time = ParameterContext(
            name='time', param_type=QuantityType(value_encoding=np.float64))
        pdict.add_context(context)
        pdict.add_context(time, True)
        # Construct temporal and spatial Coordinate Reference System objects
        tcrs = CRS([AxisTypeEnum.TIME])
        scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT])

        # Construct temporal and spatial Domain objects
        tdom = GridDomain(GridShape('temporal', [0]), tcrs,
                          MutabilityEnum.EXTENSIBLE)  # 1d (timeline)
        sdom = GridDomain(GridShape('spatial',
                                    [0]), scrs, MutabilityEnum.IMMUTABLE
                          )  # 0d spatial topology (station/trajectory)

        # Instantiate the SimplexCoverage providing the ParameterDictionary, spatial Domain and temporal Domain
        cov = SimplexCoverage('test_data',
                              create_guid(),
                              'sample coverage_model',
                              parameter_dictionary=pdict,
                              temporal_domain=tdom,
                              spatial_domain=sdom)
        self.addCleanup(shutil.rmtree, cov.persistence_dir)

        cov.insert_timesteps(len(value_array))
        cov.set_parameter_values('test',
                                 tdoa=slice(0, len(value_array)),
                                 value=value_array)
        comp_val = comp_val if comp_val is not None else value_array
        testval = cov.get_parameter_values('test')
        try:
            np.testing.assert_array_equal(testval, comp_val)
        except:
            print repr(value_array)
            raise
Code example #29
    def get_param_dict(self):
        pdict = ParameterDictionary()

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=np.float64))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=np.float64))
        pres_ctxt.uom = 'unknown'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext('temperature', param_type=QuantityType(value_encoding=np.float64))
        temp_ctxt.uom = 'unknown'
        temp_ctxt.fill_value = 0x0
        pdict.add_context(temp_ctxt)

        oxy_ctxt = ParameterContext('oxygen', param_type=QuantityType(value_encoding=np.float64))
        oxy_ctxt.uom = 'unknown'
        oxy_ctxt.fill_value = 0x0
        pdict.add_context(oxy_ctxt)

        internal_ts_ctxt = ParameterContext(name='internal_timestamp', param_type=QuantityType(value_encoding=np.float64))
        internal_ts_ctxt._derived_from_name = 'time'
        internal_ts_ctxt.uom = 'seconds'
        internal_ts_ctxt.fill_value = -1
        pdict.add_context(internal_ts_ctxt, is_temporal=True)

        driver_ts_ctxt = ParameterContext(name='driver_timestamp', param_type=QuantityType(value_encoding=np.float64))
        driver_ts_ctxt._derived_from_name = 'time'
        driver_ts_ctxt.uom = 'seconds'
        driver_ts_ctxt.fill_value = -1
        pdict.add_context(driver_ts_ctxt)

        return pdict
Code example #30
    def _get_data(cls, config):
        """
        Reads the external dataset referenced in the config over the configured temporal
        slice and yields granules of at most config['max_records'] records each
        @param config Dict of configuration parameters - must contain 'external_dataset_res' and 'dataset_object'
        """
        ext_dset_res = get_safe(config, 'external_dataset_res', None)

        # Get the Dataset object from the config (should have been instantiated in _init_acquisition_cycle)
        ds=get_safe(config, 'dataset_object')
        if ext_dset_res and ds:
            t_vname = ext_dset_res.dataset_description.parameters['temporal_dimension']
            x_vname = ext_dset_res.dataset_description.parameters['zonal_dimension']
            y_vname = ext_dset_res.dataset_description.parameters['meridional_dimension']
            z_vname = ext_dset_res.dataset_description.parameters['vertical_dimension']
            var_lst = ext_dset_res.dataset_description.parameters['variables']

            t_slice = get_safe(config, 'constraints.temporal_slice', (slice(0,1)))
            #TODO: Using 'eval' here is BAD - need to find a less sketchy way to pass constraints
            if isinstance(t_slice,str):
                t_slice=eval(t_slice)

            lon = ds.variables[x_vname][:]
            lat = ds.variables[y_vname][:]
            z = ds.variables[z_vname][:]

            t_arr = ds.variables[t_vname][t_slice]
            data_arrays = {}
            for varn in var_lst:
                data_arrays[varn] = ds.variables[varn][t_slice]

            max_rec = get_safe(config, 'max_records', 1)
            dprod_id = get_safe(config, 'data_producer_id', 'unknown data producer')
            #tx_yml = get_safe(config, 'taxonomy')
            #ttool = TaxyTool.load(tx_yml) #CBM: Assertion inside RDT.__setitem__ requires same instance of TaxyTool
            pdict = ParameterDictionary.load(get_safe(config, 'param_dictionary'))

            cnt = calculate_iteration_count(t_arr.size, max_rec)
            for x in xrange(cnt):
                ta = t_arr[x*max_rec:(x+1)*max_rec]

                # Make a 'master' RecDict
                #rdt = RecordDictionaryTool(taxonomy=ttool)
                rdt = RecordDictionaryTool(param_dictionary=pdict)
                # Make a 'coordinate' RecDict
                #rdt_c = RecordDictionaryTool(taxonomy=ttool)
                #rdt_c = RecordDictionaryTool(param_dictionary=pdict)
                # Make a 'data' RecDict
                #rdt_d = RecordDictionaryTool(taxonomy=ttool)
                #rdt_d = RecordDictionaryTool(param_dictionary=pdict)

                # Assign values to the coordinate RecDict
                rdt[x_vname] = lon
                rdt[y_vname] = lat
                rdt[z_vname] = z

                # Assign values to the data RecDict
                rdt[t_vname] = ta
                for key, arr in data_arrays.iteritems():
                    d = arr[x*max_rec:(x+1)*max_rec]
                    rdt[key] = d

                # Add the coordinate and data RecDicts to the master RecDict
                #rdt['coords'] = rdt_c
                #rdt['data'] = rdt_d

                # Build and return a granule
                # CBM: ttool must be passed
                #g = build_granule(data_producer_id=dprod_id, taxonomy=ttool, record_dictionary=rdt)
                g = build_granule(data_producer_id=dprod_id, record_dictionary=rdt, param_dictionary=pdict)
                yield g

            ds.close()
Code example #31
class ctd_L0_all(TransformDataProcess):
    """Model for a TransformDataProcess

    """

    incoming_stream_def = SBE37_CDM_stream_definition()

    def __init__(self):

        super(ctd_L0_all, self).__init__()

        # Make the stream definitions of the transform class attributes

        #outgoing_stream_pressure = L0_pressure_stream_definition()
        #outgoing_stream_temperature = L0_temperature_stream_definition()
        #outgoing_stream_conductivity = L0_conductivity_stream_definition()

        ### Taxonomies are defined before hand out of band... somehow.
    #    pres = TaxyTool()
    #    pres.add_taxonomy_set('pres','long name for pres')
    #    pres.add_taxonomy_set('lat','long name for latitude')
    #    pres.add_taxonomy_set('lon','long name for longitude')
    #    pres.add_taxonomy_set('height','long name for height')
    #    pres.add_taxonomy_set('time','long name for time')
    #    # This is an example of using groups it is not a normative statement about how to use groups
    #    pres.add_taxonomy_set('coordinates','This group contains coordinates...')
    #    pres.add_taxonomy_set('data','This group contains data...')
    #
    #    temp = TaxyTool()
    #    temp.add_taxonomy_set('temp','long name for temp')
    #    temp.add_taxonomy_set('lat','long name for latitude')
    #    temp.add_taxonomy_set('lon','long name for longitude')
    #    temp.add_taxonomy_set('height','long name for height')
    #    temp.add_taxonomy_set('time','long name for time')
    #    # This is an example of using groups it is not a normative statement about how to use groups
    #    temp.add_taxonomy_set('coordinates','This group contains coordinates...')
    #    temp.add_taxonomy_set('data','This group contains data...')
    #
    #    coord = TaxyTool()
    #    coord.add_taxonomy_set('cond','long name for cond')
    #    coord.add_taxonomy_set('lat','long name for latitude')
    #    coord.add_taxonomy_set('lon','long name for longitude')
    #    coord.add_taxonomy_set('height','long name for height')
    #    coord.add_taxonomy_set('time','long name for time')
    #    # This is an example of using groups it is not a normative statement about how to use groups
    #    coord.add_taxonomy_set('coordinates','This group contains coordinates...')
    #    coord.add_taxonomy_set('data','This group contains data...')

        ### Parameter dictionaries
        self.defining_parameter_dictionary()

        self.publisher = Publisher(to_name=NameTrio(get_sys_name(), str(uuid.uuid4())[0:6]))

    def defining_parameter_dictionary(self):

        # Define the parameter context objects

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0

        height_ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=np.float32))
        height_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        height_ctxt.uom = 'meters'
        height_ctxt.fill_value = 0e0

        pres_ctxt = ParameterContext('pres', param_type=QuantityType(value_encoding=np.float32))
        pres_ctxt.uom = 'degree_Celsius'
        pres_ctxt.fill_value = 0e0

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0

        cond_ctxt = ParameterContext('cond', param_type=QuantityType(value_encoding=np.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0

        # Define the parameter dictionary objects

        self.pres = ParameterDictionary()
        self.pres.add_context(t_ctxt)
        self.pres.add_context(lat_ctxt)
        self.pres.add_context(lon_ctxt)
        self.pres.add_context(height_ctxt)
        self.pres.add_context(pres_ctxt)
        self.pres.add_context(data_ctxt)

        self.temp = ParameterDictionary()
        self.temp.add_context(t_ctxt)
        self.temp.add_context(lat_ctxt)
        self.temp.add_context(lon_ctxt)
        self.temp.add_context(height_ctxt)
        self.temp.add_context(temp_ctxt)
        self.temp.add_context(data_ctxt)

        self.cond = ParameterDictionary()
        self.cond.add_context(t_ctxt)
        self.cond.add_context(lat_ctxt)
        self.cond.add_context(lon_ctxt)
        self.cond.add_context(height_ctxt)
        self.cond.add_context(cond_ctxt)
        self.cond.add_context(data_ctxt)

    def process(self, packet):

        """Processes incoming data!!!!
        """

        # Use the PointSupplementStreamParser to pull data from a granule
        #psd = PointSupplementStreamParser(stream_definition=self.incoming_stream_def, stream_granule=packet)
        rdt = RecordDictionaryTool.load_from_granule(packet)
        #todo: use only flat dicts for now, may change later...
#        rdt0 = rdt['coordinates']
#        rdt1 = rdt['data']

        conductivity = get_safe(rdt, 'cond') #psd.get_values('conductivity')
        pressure = get_safe(rdt, 'pres') #psd.get_values('pressure')
        temperature = get_safe(rdt, 'temp') #psd.get_values('temperature')

        longitude = get_safe(rdt, 'lon') # psd.get_values('longitude')
        latitude = get_safe(rdt, 'lat')  #psd.get_values('latitude')
        time = get_safe(rdt, 'time') # psd.get_values('time')
        height = get_safe(rdt, 'height') # psd.get_values('height')

        log.warn('Got conductivity: %s' % str(conductivity))
        log.warn('Got pressure: %s' % str(pressure))
        log.warn('Got temperature: %s' % str(temperature))

        g = self._build_granule_settings(self.cond, 'cond', conductivity, time, latitude, longitude, height)

        # publish a granule
        self.cond_publisher = self.publisher
        self.cond_publisher.publish(g)

        g = self._build_granule_settings(self.temp, 'temp', temperature, time, latitude, longitude, height)

        # publish a granule
        self.temp_publisher = self.publisher
        self.temp_publisher.publish(g)

        g = self._build_granule_settings(self.pres, 'pres', pressure, time, latitude, longitude, height)

        # publish a granule
        self.pres_publisher = self.publisher
        self.pres_publisher.publish(g)

    def _build_granule_settings(self, param_dictionary=None, field_name='', value=None, time=None, latitude=None, longitude=None, height=None):

        root_rdt = RecordDictionaryTool(param_dictionary=param_dictionary)

        #data_rdt = RecordDictionaryTool(taxonomy=taxonomy)

        root_rdt[field_name] = value

        #coor_rdt = RecordDictionaryTool(taxonomy=taxonomy)

        root_rdt['time'] = time
        root_rdt['lat'] = latitude
        root_rdt['lon'] = longitude
        root_rdt['height'] = height

        #todo: use only flat dicts for now, may change later...
#        root_rdt['coordinates'] = coor_rdt
#        root_rdt['data'] = data_rdt

        log.debug("ctd_L0_all:_build_granule_settings: logging published Record Dictionary:\n %s", str(root_rdt.pretty_print()))

        return build_granule(data_producer_id='ctd_L0', param_dictionary=param_dictionary, record_dictionary=root_rdt)
Code example #32
    def get_param_dict(self):
        pdict = ParameterDictionary()

        cond_ctxt = ParameterContext(
            'conductivity', param_type=QuantityType(value_encoding=np.float64))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext(
            'pressure', param_type=QuantityType(value_encoding=np.float64))
        pres_ctxt.uom = 'unknown'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext(
            'temperature', param_type=QuantityType(value_encoding=np.float64))
        temp_ctxt.uom = 'unknown'
        temp_ctxt.fill_value = 0x0
        pdict.add_context(temp_ctxt)

        oxy_ctxt = ParameterContext(
            'oxygen', param_type=QuantityType(value_encoding=np.float64))
        oxy_ctxt.uom = 'unknown'
        oxy_ctxt.fill_value = 0x0
        pdict.add_context(oxy_ctxt)

        internal_ts_ctxt = ParameterContext(
            name='internal_timestamp',
            param_type=QuantityType(value_encoding=np.float64))
        internal_ts_ctxt._derived_from_name = 'time'
        internal_ts_ctxt.uom = 'seconds'
        internal_ts_ctxt.fill_value = -1
        pdict.add_context(internal_ts_ctxt, is_temporal=True)

        driver_ts_ctxt = ParameterContext(
            name='driver_timestamp',
            param_type=QuantityType(value_encoding=np.float64))
        driver_ts_ctxt._derived_from_name = 'time'
        driver_ts_ctxt.uom = 'seconds'
        driver_ts_ctxt.fill_value = -1
        pdict.add_context(driver_ts_ctxt)

        return pdict
Code example #33
    def _create_parameter_dictionary(self):
        pdict = ParameterDictionary()

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('water_temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_bottom', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_middle', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('z', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'meters'
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('streamflow', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('specific_conductance', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        pres_ctxt = ParameterContext('data_qualifier', param_type=QuantityType(value_encoding=numpy.dtype('bool')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        return pdict
Code example #34
File: granule_utils.py (Project: Bobfrat/coi-services)
    def create_parameters(cls):
        '''
        WARNING: This method is a wrapper intended only for tests, it should not be used in production code.
        It probably will not align to most datasets.
        '''
        pdict = ParameterDictionary()
        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=np.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0
        pdict.add_context(data_ctxt)

        pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=np.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        sal_ctxt = ParameterContext('salinity', param_type=QuantityType(value_encoding=np.float32))
        sal_ctxt.uom = 'PSU'
        sal_ctxt.fill_value = 0x0
        pdict.add_context(sal_ctxt)

        dens_ctxt = ParameterContext('density', param_type=QuantityType(value_encoding=np.float32))
        dens_ctxt.uom = 'unknown'
        dens_ctxt.fill_value = 0x0
        pdict.add_context(dens_ctxt)

        return pdict
Code example #35
    def _create_parameter_dictionary(self):
        pdict = ParameterDictionary()

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('water_temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_bottom', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_middle', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('z', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'meters'
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('streamflow', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('specific_conductance', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        pres_ctxt = ParameterContext('data_qualifier', param_type=QuantityType(value_encoding=numpy.dtype('bool')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        return pdict
Code example #36
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='DAP', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.name='Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'usgs_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        # The usgs.nc test dataset is a download of the R1 dataset found here:
        # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
        dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
        dset.dataset_description.parameters['temporal_dimension'] = 'time'
        dset.dataset_description.parameters['zonal_dimension'] = 'lon'
        dset.dataset_description.parameters['meridional_dimension'] = 'lat'
        dset.dataset_description.parameters['vertical_dimension'] = 'z'
        dset.dataset_description.parameters['variables'] = [
            'water_temperature',
            'streamflow',
            'water_temperature_bottom',
            'water_temperature_middle',
            'specific_conductance',
            'data_qualifier',
            ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='dap_model')
        dsrc_model.model = 'DAP'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        #create temp streamdef so the data product can create the stream
        streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dprod = IonObject(RT.DataProduct,
            name='usgs_parsed_product',
            description='parsed usgs product',
            temporal_domain = tdom,
            spatial_domain = sdom)

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
                                                    stream_definition_id=streamdef_id,
                                                    parameter_dictionary=parameter_dictionary)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset':ds_id, 'ExternalDataProvider':ext_dprov_id, 'DataSource':ext_dsrc_id, 'DataSourceModel':ext_dsrc_model_id, 'DataProducer':dproducer_id, 'DataProduct':dproduct_id, 'Stream':stream_id}))

        #CBM: Use CF standard_names

#        ttool = TaxyTool()
#        ttool.add_taxonomy_set('time','time')
#        ttool.add_taxonomy_set('lon','longitude')
#        ttool.add_taxonomy_set('lat','latitude')
#        ttool.add_taxonomy_set('z','water depth')
#        ttool.add_taxonomy_set('water_temperature', 'average water temperature')
#        ttool.add_taxonomy_set('water_temperature_bottom','water temperature at bottom of water column')
#        ttool.add_taxonomy_set('water_temperature_middle', 'water temperature at middle of water column')
#        ttool.add_taxonomy_set('streamflow', 'flow velocity of stream')
#        ttool.add_taxonomy_set('specific_conductance', 'specific conductance of water')
#        ttool.add_taxonomy_set('data_qualifier','data qualifier flag')
#
#        ttool.add_taxonomy_set('coords','This group contains coordinate parameters')
#        ttool.add_taxonomy_set('data','This group contains data parameters')

        # Create the logger for receiving publications
        self.create_stream_and_logger(name='usgs', stream_id=stream_id)

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('water_temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_bottom', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_middle', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('z', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'meters'
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('streamflow', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('specific_conductance', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        pres_ctxt = ParameterContext('data_qualifier', param_type=QuantityType(value_encoding=numpy.dtype('bool')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            #'taxonomy': ttool.dump(),
            'param_dictionary': pdict.dump(),
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 4,
        }
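
Since the handler config above ships the parameter dictionary in dumped (serialized) form, the consuming side generally has to rehydrate it before use. The following is a minimal sketch of that round trip, assuming the consumer pairs ParameterDictionary.load() with the dump() call above; ParameterDictionary and log are assumed to be imported as in the other snippets on this page, and the helper name is hypothetical.

def _restore_param_dictionary(dh_cfg):
    # Hedged sketch: restore the ParameterDictionary serialized into dh_cfg above.
    restored_pdict = ParameterDictionary.load(dh_cfg['param_dictionary'])
    param_names = [name for name, _ in restored_pdict.iteritems()]
    log.info('Restored parameters: %s', param_names)
    return restored_pdict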
Code example #37
    def create_parameters(cls):
        '''
        WARNING: This method is a wrapper intended only for tests; it should not be used in production code.
        It probably will not align with most datasets.
        '''
        pdict = ParameterDictionary()
        t_ctxt = ParameterContext(
            'time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext(
            'lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext(
            'lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext(
            'temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext(
            'conductivity', param_type=QuantityType(value_encoding=np.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        data_ctxt = ParameterContext(
            'data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0
        pdict.add_context(data_ctxt)

        pres_ctxt = ParameterContext(
            'pressure', param_type=QuantityType(value_encoding=np.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        sal_ctxt = ParameterContext(
            'salinity', param_type=QuantityType(value_encoding=np.float32))
        sal_ctxt.uom = 'PSU'
        sal_ctxt.fill_value = 0x0
        pdict.add_context(sal_ctxt)

        dens_ctxt = ParameterContext(
            'density', param_type=QuantityType(value_encoding=np.float32))
        dens_ctxt.uom = 'unknown'
        dens_ctxt.fill_value = 0x0
        pdict.add_context(dens_ctxt)

        return pdict
Code example #38
    def _create_parameter_dictionary(self):
        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('c_wpt_y_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_water_cond', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_y_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_hd_fin_ap_inflection_holdoff', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_m_present_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_leakdetect_voltage_forward', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_bb3slo_b660_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_science_send_all', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_gps_status', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_water_vx', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_water_vy', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_heading', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_fl3slo_chlor_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_hd_fin_ap_gain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_vacuum', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_min_water_depth', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_gps_lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_veh_temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('f_fin_offset', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_hd_fin_ap_hardover_holdoff', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_alt_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_present_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_heading', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_bb3slo_b532_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_fl3slo_cdom_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_fin', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('x_cycle_overrun_in_ms', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_water_pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_hd_fin_ap_igain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_fl3slo_phyco_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_battpos', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_bb3slo_b470_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_gps_lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_ctd41cp_timestamp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_wpt_x_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_ballast_pumped', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('x_lmc_xy_source', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_avg_speed', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_water_temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_pitch_ap_gain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_roll', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_tot_num_inflections', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_x_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_pitch_ap_deadband', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_final_water_vy', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_final_water_vx', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_water_depth', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_leakdetect_voltage', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_pitch_max_delta_battpos', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_coulomb_amphr', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_pitch', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        return pdict
Code example #39
    def sync_rdt_with_coverage(self,
                               coverage=None,
                               tdoa=None,
                               start_time=None,
                               end_time=None,
                               stride_time=None,
                               parameters=None):
        '''
        Builds a granule based on the coverage
        '''
        if coverage is None:
            coverage = self.coverage

        slice_ = slice(None)  # Defaults to all values
        if tdoa is not None and isinstance(tdoa, slice):
            slice_ = tdoa

        elif stride_time is not None:
            validate_is_instance(start_time, Number,
                                 'start_time must be a number for striding.')
            validate_is_instance(end_time, Number,
                                 'end_time must be a number for striding.')
            validate_is_instance(stride_time, Number,
                                 'stride_time must be a number for striding.')
            ugly_range = np.arange(start_time, end_time, stride_time)
            idx_values = [
                TimeUtils.get_relative_time(coverage, i) for i in ugly_range
            ]
            slice_ = [idx_values]

        elif not (start_time is None and end_time is None):
            time_var = coverage._temporal_param_name
            uom = coverage.get_parameter_context(time_var).uom
            if start_time is not None:
                start_units = TimeUtils.ts_to_units(uom, start_time)
                log.info('Units: %s', start_units)
                start_idx = TimeUtils.get_relative_time(coverage, start_units)
                log.info('Start Index: %s', start_idx)
                start_time = start_idx
            if end_time is not None:
                end_units = TimeUtils.ts_to_units(uom, end_time)
                log.info('End units: %s', end_units)
                end_idx = TimeUtils.get_relative_time(coverage, end_units)
                log.info('End index: %s', end_idx)
                end_time = end_idx
            slice_ = slice(start_time, end_time, stride_time)
            log.info('Slice: %s', slice_)

        if parameters is not None:
            pdict = ParameterDictionary()
            params = set(coverage.list_parameters()).intersection(parameters)
            for param in params:
                pdict.add_context(coverage.get_parameter_context(param))
            rdt = RecordDictionaryTool(param_dictionary=pdict)
            self.pdict = pdict
        else:
            rdt = RecordDictionaryTool(
                param_dictionary=coverage.parameter_dictionary)

        fields = coverage.list_parameters()
        if parameters is not None:
            fields = set(fields).intersection(parameters)

        for d in fields:
            rdt[d] = coverage.get_parameter_values(d, tdoa=slice_)
        self.rdt = rdt  # Sync
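
As a usage illustration, the method above can be driven with epoch timestamps to window the coverage before syncing. The sketch below is hypothetical: 'helper' stands in for an instance of the class above (with its coverage already attached) and the parameter names are only examples.

import time

# Hedged sketch: sync only the last hour of 'temp' and 'salinity' into the RDT.
end_time = time.time()          # now, as a UNIX timestamp
start_time = end_time - 3600    # one hour ago

helper.sync_rdt_with_coverage(start_time=start_time,
                              end_time=end_time,
                              parameters=['temp', 'salinity'])
rdt = helper.rdt  # RecordDictionaryTool populated from the windowed coverage slice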
Code example #40
    def _create_input_param_dict_for_test(self, parameter_dict_name = ''):

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('float64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1900'
        pdict.add_context(t_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'Siemens_per_meter'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'Pascal'
        pdict.add_context(pres_ctxt)

        if parameter_dict_name == 'input_param_for_L0':
            temp_ctxt = ParameterContext('temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        else:
            temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))

        temp_ctxt.uom = 'degree_kelvin'
        pdict.add_context(temp_ctxt)

        dens_ctxt = ParameterContext('density', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        dens_ctxt.uom = 'g/m'
        pdict.add_context(dens_ctxt)

        sal_ctxt = ParameterContext('salinity', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        sal_ctxt.uom = 'PSU'
        pdict.add_context(sal_ctxt)

        #create temp streamdef so the data product can create the stream
        pc_list = []
        for pc_k, pc in pdict.iteritems():
            ctxt_id = self.dataset_management.create_parameter_context(pc_k, pc[1].dump())
            pc_list.append(ctxt_id)
            if parameter_dict_name == 'input_param_for_L0':
                self.addCleanup(self.dataset_management.delete_parameter_context,ctxt_id)
            elif pc[1].name == 'temp':
                self.addCleanup(self.dataset_management.delete_parameter_context,ctxt_id)

        pdict_id = self.dataset_management.create_parameter_dictionary(parameter_dict_name, pc_list)
        self.addCleanup(self.dataset_management.delete_parameter_dictionary, pdict_id)

        return pdict_id
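
The returned parameter-dictionary id is normally what a stream definition is built from (compare the RUV setup in a later example on this page). A minimal sketch of that follow-up step; the PubsubManagementServiceClient usage and the stream-definition name are assumptions for illustration only.

        # Hedged sketch (body of a hypothetical test method on the same class):
        # build a stream definition backed by the registered parameter dictionary.
        pubsub_cli = PubsubManagementServiceClient()
        pdict_id = self._create_input_param_dict_for_test(parameter_dict_name='input_param_for_L0')
        stream_def_id = pubsub_cli.create_stream_definition(
            name='ctd_L0_stream_def',
            parameter_dictionary_id=pdict_id)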
Code example #41
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(name='example data agent',
                                   handler_module=self.DVR_CONFIG['dvr_mod'],
                                   handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance(
            name='example dataset agent instance')
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(
            eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(name='example data provider',
                                     institution=Institution(),
                                     contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(name='example datasource',
                          protocol_type='FILE',
                          institution=Institution(),
                          contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'ruv_test_dataset'
        dset = ExternalDataset(name=ds_name,
                               dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(),
                               contact=ContactInformation())

        dset.dataset_description.parameters['base_url'] = 'test_data/ruv/'
        dset.dataset_description.parameters[
            'list_pattern'] = 'RDLi_SEAB_2011_08_24_1600.ruv'
        dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H %M'
        dset.dataset_description.parameters[
            'date_extraction_pattern'] = 'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = []

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='ruv_model')
        #dsrc_model.model = 'RUV'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(
            external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(
            external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(
            data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(
            external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(
            external_dataset_id=ds_id, agent_instance_id=eda_inst_id)

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext(
            'data',
            param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        #create temp streamdef so the data product can create the stream
        pc_list = []
        for pc_k, pc in pdict.iteritems():
            pc_list.append(dms_cli.create_parameter_context(
                pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('ruv_param_dict',
                                                       pc_list)

        streamdef_id = pubsub_cli.create_stream_definition(
            name="ruv",
            description="stream def for ruv testing",
            parameter_dictionary_id=pdict_id)

        dprod = IonObject(RT.DataProduct,
                          name='ruv_parsed_product',
                          description='parsed ruv product')

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(
            data_product=dprod, stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id,
                                     data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id,
                                              predicate=PRED.hasStream,
                                              object_type=RT.Stream,
                                              id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({
            'ExternalDataset': ds_id,
            'ExternalDataProvider': ext_dprov_id,
            'DataSource': ext_dsrc_id,
            'DataSourceModel': ext_dsrc_model_id,
            'DataProducer': dproducer_id,
            'DataProduct': dproduct_id,
            'Stream': stream_id
        }))

        #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='ruv',
                                                           stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'external_dataset_res': dset,
            'param_dictionary': pdict.dump(),
            'data_producer_id': dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records': 20,
        }
Code example #42
    def _create_input_param_dict_for_test(self, parameter_dict_name=''):

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext(
            'time',
            param_type=QuantityType(value_encoding=numpy.dtype('float64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1900'
        pdict.add_context(t_ctxt)

        cond_ctxt = ParameterContext(
            'conductivity',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = ''
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext(
            'pressure',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = ''
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext(
            'temperature',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = ''
        pdict.add_context(temp_ctxt)

        dens_ctxt = ParameterContext(
            'density',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        dens_ctxt.uom = ''
        pdict.add_context(dens_ctxt)

        sal_ctxt = ParameterContext(
            'salinity',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        sal_ctxt.uom = ''
        pdict.add_context(sal_ctxt)

        #create temp streamdef so the data product can create the stream
        pc_list = []
        for pc_k, pc in pdict.iteritems():
            ctxt_id = self.dataset_management.create_parameter_context(
                pc_k, pc[1].dump())
            pc_list.append(ctxt_id)
            self.addCleanup(self.dataset_management.delete_parameter_context,
                            ctxt_id)

        pdict_id = self.dataset_management.create_parameter_dictionary(
            parameter_dict_name, pc_list)
        self.addCleanup(self.dataset_management.delete_parameter_dictionary,
                        pdict_id)

        return pdict_id
Code example #43
def adhoc_get_parameter_dictionary(stream_name):
    """
    @param stream_name IGNORED in this ad hoc function; the same
                ParameterDictionary definition is always returned.
    @retval The corresponding ParameterDictionary.
    """

    # @TODO Luke - Maybe we can make this a bit more versatile; we could make this a standard pdict...

    pdict = ParameterDictionary()

#    ctxt = ParameterContext('value', param_type=QuantityType(value_encoding=numpy.float32))
    ctxt = ParameterContext('value', param_type=QuantityType(value_encoding=numpy.dtype('float64')))
    ctxt.uom = 'unknown'
    ctxt.fill_value = 0e0
    pdict.add_context(ctxt)

    ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
    ctxt.axis = AxisTypeEnum.TIME
    ctxt.uom = 'seconds since 01-01-1970'
    pdict.add_context(ctxt)

    ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    ctxt.axis = AxisTypeEnum.LON
    ctxt.uom = 'degree_east'
    pdict.add_context(ctxt)

    ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    ctxt.axis = AxisTypeEnum.LAT
    ctxt.uom = 'degree_north'
    pdict.add_context(ctxt)

    ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    ctxt.axis = AxisTypeEnum.HEIGHT
    ctxt.uom = 'unknown'
    pdict.add_context(ctxt)

    return pdict
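
For reference, a minimal sketch of how a ParameterDictionary built this way is typically consumed in the other snippets on this page: it backs a RecordDictionaryTool that is then filled per parameter. RecordDictionaryTool is assumed to be importable as in those snippets, and the sample values are illustrative only.

import numpy

# Hedged usage sketch: populate a record dictionary from the ad hoc definition above.
pdict = adhoc_get_parameter_dictionary('stream_name_is_ignored')

rdt = RecordDictionaryTool(param_dictionary=pdict)
rdt['time'] = numpy.arange(10, dtype=numpy.int64)
rdt['value'] = numpy.random.random(10)
rdt['lat'] = numpy.ones(10, dtype=numpy.float32) * 41.0
rdt['lon'] = numpy.ones(10, dtype=numpy.float32) * -71.0
rdt['height'] = numpy.zeros(10, dtype=numpy.float32)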