Example #1
    def cov_io(self, context, value_array, comp_val=None):
        pdict = ParameterDictionary()
        time = ParameterContext(name='time', param_type=QuantityType(value_encoding=np.float64))
        pdict.add_context(context)
        pdict.add_context(time, True)
        # Construct temporal and spatial Coordinate Reference System objects
        tcrs = CRS([AxisTypeEnum.TIME])
        scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT])

        # Construct temporal and spatial Domain objects
        tdom = GridDomain(GridShape('temporal', [0]), tcrs, MutabilityEnum.EXTENSIBLE) # 1d (timeline)
        sdom = GridDomain(GridShape('spatial', [0]), scrs, MutabilityEnum.IMMUTABLE) # 0d spatial topology (station/trajectory)

        # Instantiate the SimplexCoverage providing the ParameterDictionary, spatial Domain and temporal Domain
        cov = SimplexCoverage('test_data', create_guid(), 'sample coverage_model', parameter_dictionary=pdict, temporal_domain=tdom, spatial_domain=sdom)

        cov.insert_timesteps(len(value_array))
        cov.set_parameter_values('test', tdoa=slice(0,len(value_array)), value=value_array)
        comp_val = comp_val if comp_val is not None else value_array
        testval = cov.get_parameter_values('test')
        try:
            np.testing.assert_array_equal(testval, comp_val)
        except:
            print repr(value_array)
            raise
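The helper above expects the supplied context to be named 'test', since that is the parameter it writes and reads back. A minimal invocation sketch from within the same test class (the float32 context and sample array are illustrative, not from the original test):

context = ParameterContext(name='test', param_type=QuantityType(value_encoding=np.float32))
self.cov_io(context, np.array([1.0, 2.0, 3.0], dtype=np.float32))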
    def _create_parameter(self):

        pdict = ParameterDictionary()

        pdict = self._add_location_time_ctxt(pdict)

        pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=numpy.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        raw_fixed_ctxt = ParameterContext('raw_fixed', param_type=QuantityType(value_encoding=numpy.float32))
        raw_fixed_ctxt.uom = 'unknown'
        raw_fixed_ctxt.fill_value = 0e0
        pdict.add_context(raw_fixed_ctxt)

        raw_blob_ctxt = ParameterContext('raw_blob', param_type=QuantityType(value_encoding=numpy.float32))
        raw_blob_ctxt.uom = 'unknown'
        raw_blob_ctxt.fill_value = 0e0
        pdict.add_context(raw_blob_ctxt)

        return pdict
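The dictionary returned above can seed a coverage in the same way as Example #1. A hedged sketch, assuming the domain/CRS imports from Example #1 are in scope and that _add_location_time_ctxt supplies the temporal context:

pdict = self._create_parameter()
tdom = GridDomain(GridShape('temporal', [0]), CRS([AxisTypeEnum.TIME]), MutabilityEnum.EXTENSIBLE)
sdom = GridDomain(GridShape('spatial', [0]), CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT]), MutabilityEnum.IMMUTABLE)
cov = SimplexCoverage('test_data', create_guid(), 'ctd sample coverage',
                      parameter_dictionary=pdict, temporal_domain=tdom, spatial_domain=sdom)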
Example #3
def get_param_dict(param_dict_name=None):
    raise NotImplementedError('This method has been replaced by DatasetManagementService, please use read_parameter_dictionary_by_name instead')
    # read the file just once, not every time needed
    global _PARAMETER_DICTIONARIES
    global _PARAMETER_CONTEXTS
    if not _PARAMETER_DICTIONARIES:
        param_dict_defs_file = "res/config/param_dict_defs.yml"
        with open(param_dict_defs_file, "r") as f_dict:
            dict_string = f_dict.read()
        _PARAMETER_DICTIONARIES = yaml.load(dict_string)

        param_context_defs_file = "res/config/param_context_defs.yml"
        with open(param_context_defs_file, "r") as f_ctxt:
            ctxt_string = f_ctxt.read()
        _PARAMETER_CONTEXTS = yaml.load(ctxt_string)

    # make sure we have the one requested
    context_names = _PARAMETER_DICTIONARIES[param_dict_name]
    for name in context_names:
        if name not in _PARAMETER_CONTEXTS:
            raise AssertionError('The parameter dict has a context that does not exist in the parameter context defs specified in yml: %s' % name)

    # package and ship
    pdict = ParameterDictionary()
    for ctxt_name in context_names:
        param_context = ParameterContext.load(_PARAMETER_CONTEXTS[ctxt_name])
        pdict.add_context(param_context)
    return pdict
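The YAML files are expected to map dictionary names to lists of context names, and context names to definitions that ParameterContext.load() can restore. A hedged round-trip sketch of that dump/load pairing (the 'temp' context and the numpy import are assumptions, not part of this example):

ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.float32))
restored = ParameterContext.load(ctxt.dump())  # the YAML context entries are expected to hold dumped contexts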
Example #4
def get_param_dict(param_dict_name=None):
    raise NotImplementedError(
        'This method has been replaced by DatasetManagementService, please use read_parameter_dictionary_by_name instead'
    )
    # read the file just once, not every time needed
    global _PARAMETER_DICTIONARIES
    global _PARAMETER_CONTEXTS
    if not _PARAMETER_DICTIONARIES:
        param_dict_defs_file = "res/config/param_dict_defs.yml"
        with open(param_dict_defs_file, "r") as f_dict:
            dict_string = f_dict.read()
        _PARAMETER_DICTIONARIES = yaml.load(dict_string)

        param_context_defs_file = "res/config/param_context_defs.yml"
        with open(param_context_defs_file, "r") as f_ctxt:
            ctxt_string = f_ctxt.read()
        _PARAMETER_CONTEXTS = yaml.load(ctxt_string)

    # make sure we have the one requested
    context_names = _PARAMETER_DICTIONARIES[param_dict_name]
    for name in context_names:
        if name not in _PARAMETER_CONTEXTS:
            raise AssertionError(
                'The parameter dict has a context that does not exist in the parameter context defs specified in yml: %s'
                % name)

    # package and ship
    pdict = ParameterDictionary()
    for ctxt_name in context_names:
        param_context = ParameterContext.load(_PARAMETER_CONTEXTS[ctxt_name])
        pdict.add_context(param_context)
    return pdict
Example #5
    def sync_rdt_with_coverage(self, coverage=None, tdoa=None, start_time=None, end_time=None, stride_time=None, parameters=None):
        '''
        Builds a granule based on the coverage
        '''
        if coverage is None:
            coverage = self.coverage

        slice_ = slice(None) # Defaults to all values
        if tdoa is not None and isinstance(tdoa,slice):
            slice_ = tdoa

        elif stride_time is not None:
            validate_is_instance(start_time, Number, 'start_time must be a number for striding.')
            validate_is_instance(end_time, Number, 'end_time must be a number for striding.')
            validate_is_instance(stride_time, Number, 'stride_time must be a number for striding.')
            ugly_range = np.arange(start_time, end_time, stride_time)
            idx_values = [TimeUtils.get_relative_time(coverage,i) for i in ugly_range]
            slice_ = [idx_values]

        elif not (start_time is None and end_time is None):
            time_var = coverage._temporal_param_name
            uom = coverage.get_parameter_context(time_var).uom
            if start_time is not None:
                start_units = TimeUtils.ts_to_units(uom,start_time)
                log.info('Units: %s', start_units)
                start_idx = TimeUtils.get_relative_time(coverage,start_units)
                log.info('Start Index: %s', start_idx)
                start_time = start_idx
            if end_time is not None:
                end_units   = TimeUtils.ts_to_units(uom,end_time)
                log.info('End units: %s', end_units)
                end_idx   = TimeUtils.get_relative_time(coverage,end_units)
                log.info('End index: %s',  end_idx)
                end_time = end_idx
            slice_ = slice(start_time,end_time,stride_time)
            log.info('Slice: %s', slice_)

        if parameters is not None:
            pdict = ParameterDictionary()
            params = set(coverage.list_parameters()).intersection(parameters)
            for param in params:
                pdict.add_context(coverage.get_parameter_context(param))
            rdt = RecordDictionaryTool(param_dictionary=pdict)
            self.pdict = pdict
        else:
            rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)
        
        fields = coverage.list_parameters()
        if parameters is not None:
            fields = set(fields).intersection(parameters)

        for d in fields:
            rdt[d] = coverage.get_parameter_values(d,tdoa=slice_)
        self.rdt = rdt # Sync
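A hedged usage sketch for the method above (the window, stride, and parameter names are illustrative): with a stride, the start/end/stride values are expanded with np.arange and mapped to coverage indices via TimeUtils.get_relative_time.

self.sync_rdt_with_coverage(start_time=3600, end_time=7200, stride_time=2,
                            parameters=['time', 'temp'])
granule = self.rdt.to_granule()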
    def _create_parameter_dictionary(self):
        pdict = ParameterDictionary()

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        return pdict
Example #7
    def sync_rdt_with_coverage(self, coverage=None, tdoa=None, start_time=None, end_time=None, parameters=None):
        '''
        Builds a granule based on the coverage
        '''
        if coverage is None:
            coverage = self.coverage

        slice_ = slice(None) # Defaults to all values
        if tdoa is not None and isinstance(tdoa,slice):
            slice_ = tdoa

        elif not (start_time is None and end_time is None):
            uom = coverage.get_parameter_context('time').uom
            if start_time is not None:
                start_units = self.ts_to_units(uom,start_time)
                log.info('Units: %s', start_units)
                start_idx = self.get_relative_time(coverage,start_units)
                log.info('Start Index: %s', start_idx)
                start_time = start_idx
            if end_time is not None:
                end_units   = self.ts_to_units(uom,end_time)
                log.info('End units: %s', end_units)
                end_idx   = self.get_relative_time(coverage,end_units)
                log.info('End index: %s',  end_idx)
                end_time = end_idx
            slice_ = slice(start_time,end_time)
            log.info('Slice: %s', slice_)

        if parameters is not None:
            pdict = ParameterDictionary()
            params = set(coverage.list_parameters()).intersection(parameters)
            for param in params:
                pdict.add_context(coverage.get_parameter_context(param))
            rdt = RecordDictionaryTool(param_dictionary=pdict)
            self.pdict = pdict
        else:
            rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)
        
        fields = coverage.list_parameters()
        if parameters is not None:
            fields = set(fields).intersection(parameters)

        for d in fields:
            rdt[d] = coverage.get_parameter_values(d,tdoa=slice_)
        self.rdt = rdt # Sync
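A hedged sketch of the slice-based path (the index range and parameter list are illustrative): passing an explicit tdoa slice bypasses the time-unit conversion entirely.

self.sync_rdt_with_coverage(tdoa=slice(0, 10), parameters=['time'])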
Example #8
    def rdt_to_granule(self, context, value_array, comp_val=None):

        pdict = ParameterDictionary()
        pdict.add_context(context)

        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt["test"] = value_array

        granule = rdt.to_granule()
        rdt2 = RecordDictionaryTool.load_from_granule(granule)

        testval = comp_val if comp_val is not None else value_array
        actual = rdt2["test"]

        if isinstance(testval, basestring):
            self.assertEquals(testval, actual)
        else:
            np.testing.assert_array_equal(testval, actual)
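A minimal invocation sketch for the helper above (the context must be named 'test', since that is the key written into the record dictionary; the float32 array is illustrative):

context = ParameterContext('test', param_type=QuantityType(value_encoding=np.float32))
self.rdt_to_granule(context, np.array([1.0, 2.0, 3.0], dtype=np.float32))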
Example #9
    def rdt_to_granule(self, context, value_array, comp_val=None):

        pdict = ParameterDictionary()
        pdict.add_context(context)

        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt['test'] = value_array

        granule = rdt.to_granule()
        rdt2 = RecordDictionaryTool.load_from_granule(granule)

        testval = comp_val if comp_val is not None else value_array
        actual = rdt2['test']

        if isinstance(testval, basestring):
            self.assertEquals(testval, actual)
        else:
            np.testing.assert_array_equal(testval, actual)
Example #10
    def create(self, path):
        mkdir_silent(path)
        
        tcrs = CRS([AxisTypeEnum.TIME])
        scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT, AxisTypeEnum.HEIGHT])

        tdom = GridDomain(GridShape('temporal', [0]), tcrs, MutabilityEnum.EXTENSIBLE)
        sdom = GridDomain(GridShape('spatial', [0]), scrs, MutabilityEnum.IMMUTABLE) # Dimensionality is excluded for now
            
        pdict = ParameterDictionary()
        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0
        pdict.add_context(lon_ctxt)

        dens_ctxt = ParameterContext('data_quantity', param_type=QuantityType(value_encoding=np.float32))
        dens_ctxt.uom = 'unknown'
        dens_ctxt.fill_value = 0x0
        pdict.add_context(dens_ctxt)
        
        serial_ctxt = ParameterContext('data_array', param_type=ArrayType())
        serial_ctxt.uom = 'unknown'
        serial_ctxt.fill_value = 0x0
        pdict.add_context(serial_ctxt)
       
        guid = str(uuid.uuid4()).upper()

        self.path = path
        self.cov = SimplexCoverage(path, guid, name='test_cov', parameter_dictionary=pdict, temporal_domain=tdom, spatial_domain=sdom)
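A hedged follow-on sketch (the path and values are illustrative): once create() has run, the coverage can be extended along time and populated through the parameters defined above.

self.create('/tmp/test_cov')  # the directory is created by mkdir_silent inside create()
self.cov.insert_timesteps(3)
self.cov.set_parameter_values('time', value=np.arange(3, dtype=np.int64))
self.cov.set_parameter_values('data_quantity', value=np.array([1.0, 2.0, 3.0], dtype=np.float32))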
    def _setup_resources(self):
        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        stream_id, stream_route, stream_def = self.create_stream_and_logger(name='fibonacci_stream', pdict=pdict)
        #        tx = TaxyTool()
        #        tx.add_taxonomy_set('data', 'external_data')

        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'stream_def': stream_def,
            'data_producer_id': 'fibonacci_data_producer_id',
            'max_records': 4,
            }
Example #12
    def rdt_to_granule(self, context, value_array, comp_val=None):
        time = ParameterContext(name='time', param_type=QuantityType(value_encoding=np.float64))
        
        pdict = ParameterDictionary()
        pdict.add_context(time, is_temporal=True)
        pdict.add_context(context)

        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt['time'] = np.arange(len(value_array))
        rdt['test'] = value_array

        granule = rdt.to_granule()
        rdt2 = RecordDictionaryTool.load_from_granule(granule)

        testval = comp_val if comp_val is not None else value_array
        actual = rdt2['test']

        if isinstance(testval, basestring):
            self.assertEquals(testval, actual)
        else:
            np.testing.assert_array_equal(testval, actual)
Example #13
    def _create_parameter(self):

        pdict = ParameterDictionary()

        pdict = self._add_location_time_ctxt(pdict)

        pres_ctxt = ParameterContext(
            'pressure', param_type=QuantityType(value_encoding=numpy.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext(
            'temp', param_type=QuantityType(value_encoding=numpy.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext(
            'conductivity',
            param_type=QuantityType(value_encoding=numpy.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        return pdict
Example #14
    def rdt_to_granule(self, context, value_array, comp_val=None):
        time = ParameterContext(
            name='time', param_type=QuantityType(value_encoding=np.float64))

        pdict = ParameterDictionary()
        pdict.add_context(time, is_temporal=True)
        pdict.add_context(context)

        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt['time'] = np.arange(len(value_array))
        rdt['test'] = value_array

        granule = rdt.to_granule()
        rdt2 = RecordDictionaryTool.load_from_granule(granule)

        testval = comp_val if comp_val is not None else value_array
        actual = rdt2['test']

        if isinstance(testval, basestring):
            self.assertEquals(testval, actual)
        else:
            np.testing.assert_array_equal(testval, actual)
Example #15
def adhoc_get_parameter_dictionary(stream_name):
    """
    @param stream_name IGNORED in this adhoc function; it always returns the same
                ParameterDictionary definition.
    @retval corresponding ParameterDictionary.
    """

    #@TODO Luke - Maybe we can make this a bit more versatile, we could make this a standard pdict...

    pdict = ParameterDictionary()

#    ctxt = ParameterContext('value', param_type=QuantityType(value_encoding=numpy.float32))
    ctxt = ParameterContext('value', param_type=QuantityType(value_encoding=numpy.dtype('float64')))
    ctxt.uom = 'unknown'
    ctxt.fill_value = 0e0
    pdict.add_context(ctxt)

    ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
    ctxt.axis = AxisTypeEnum.TIME
    ctxt.uom = 'seconds since 01-01-1970'
    pdict.add_context(ctxt)

    ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    ctxt.axis = AxisTypeEnum.LON
    ctxt.uom = 'degree_east'
    pdict.add_context(ctxt)

    ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    ctxt.axis = AxisTypeEnum.LAT
    ctxt.uom = 'degree_north'
    pdict.add_context(ctxt)

    ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    ctxt.axis = AxisTypeEnum.HEIGHT
    ctxt.uom = 'unknown'
    pdict.add_context(ctxt)

    return pdict
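A hedged usage sketch (the stream name is ignored by design, so any label works; the RecordDictionaryTool usage mirrors the earlier examples and is not part of this snippet):

pdict = adhoc_get_parameter_dictionary('any_stream')
rdt = RecordDictionaryTool(param_dictionary=pdict)
rdt['time'] = numpy.arange(10, dtype=numpy.int64)
rdt['value'] = numpy.arange(10, dtype=numpy.float64)
granule = rdt.to_granule()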
Example #16
    def cov_io(self, context, value_array, comp_val=None):
        pdict = ParameterDictionary()
        time = ParameterContext(
            name='time', param_type=QuantityType(value_encoding=np.float64))
        pdict.add_context(context)
        pdict.add_context(time, True)
        # Construct temporal and spatial Coordinate Reference System objects
        tcrs = CRS([AxisTypeEnum.TIME])
        scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT])

        # Construct temporal and spatial Domain objects
        tdom = GridDomain(GridShape('temporal', [0]), tcrs,
                          MutabilityEnum.EXTENSIBLE)  # 1d (timeline)
        sdom = GridDomain(GridShape('spatial',
                                    [0]), scrs, MutabilityEnum.IMMUTABLE
                          )  # 0d spatial topology (station/trajectory)

        # Instantiate the SimplexCoverage providing the ParameterDictionary, spatial Domain and temporal Domain
        cov = SimplexCoverage('test_data',
                              create_guid(),
                              'sample coverage_model',
                              parameter_dictionary=pdict,
                              temporal_domain=tdom,
                              spatial_domain=sdom)
        self.addCleanup(shutil.rmtree, cov.persistence_dir)

        cov.insert_timesteps(len(value_array))
        cov.set_parameter_values('test',
                                 tdoa=slice(0, len(value_array)),
                                 value=value_array)
        comp_val = comp_val if comp_val is not None else value_array
        testval = cov.get_parameter_values('test')
        try:
            np.testing.assert_array_equal(testval, comp_val)
        except:
            print repr(value_array)
            raise
Example #17
class SimplexCoverage(AbstractCoverage):
    """
    A concrete implementation of AbstractCoverage consisting of 2 domains (temporal and spatial)
    and a collection of parameters associated with one or both of the domains.  Each parameter is defined by a
    ParameterContext object (provided via the ParameterDictionary) and has content represented by a concrete implementation
    of the AbstractParameterValue class.

    """
    def __init__(self, root_dir, persistence_guid, name=None, parameter_dictionary=None, temporal_domain=None, spatial_domain=None, mode=None, in_memory_storage=False, bricking_scheme=None, inline_data_writes=True, auto_flush_values=True):
        """
        Constructor for SimplexCoverage

        @param root_dir The root directory for storage of this coverage
        @param persistence_guid The persistence uuid for this coverage
        @param name The name of the coverage
        @param parameter_dictionary    a ParameterDictionary object expected to contain one or more valid ParameterContext objects
        @param spatial_domain  a concrete instance of AbstractDomain for the spatial domain component
        @param temporal_domain a concrete instance of AbstractDomain for the temporal domain component
        @param mode the file mode for the coverage; one of 'r', 'a', 'r+', or 'w'; defaults to 'r'
        @param in_memory_storage    if False (default), HDF5 persistence is used; otherwise, nothing is written to disk and all data is held in memory only
        @param bricking_scheme  the bricking scheme for the coverage; a dict of the form {'brick_size': #, 'chunk_size': #}
        @param inline_data_writes   if True (default), brick data is written as it is set; otherwise it is written out-of-band by worker processes or threads
        @param auto_flush_values    if True (default), brick data is flushed immediately; otherwise it is buffered until SimplexCoverage.flush_values() is called
        """
        AbstractCoverage.__init__(self, mode=mode)
        try:
            # Make sure root_dir and persistence_guid are both not None and are strings
            if not isinstance(root_dir, str) or not isinstance(persistence_guid, str):
                raise TypeError('\'root_dir\' and \'persistence_guid\' must be instances of str')

            root_dir = root_dir if not root_dir.endswith(persistence_guid) else os.path.split(root_dir)[0]

            pth=os.path.join(root_dir, persistence_guid)

            def _doload(self):
                # Make sure the coverage directory exists
                if not os.path.exists(pth):
                    raise SystemError('Cannot find specified coverage: {0}'.format(pth))

                # All appears well - load it up!
                self._persistence_layer = PersistenceLayer(root_dir, persistence_guid, mode=self.mode)

                self.name = self._persistence_layer.name
                self.spatial_domain = self._persistence_layer.sdom
                self.temporal_domain = self._persistence_layer.tdom

                self._range_dictionary = ParameterDictionary()
                self._range_value = RangeValues()

                self._bricking_scheme = self._persistence_layer.global_bricking_scheme

                self._in_memory_storage = False

                auto_flush_values = self._persistence_layer.auto_flush_values
                inline_data_writes = self._persistence_layer.inline_data_writes

                from coverage_model.persistence import PersistedStorage
                for parameter_name in self._persistence_layer.parameter_metadata.keys():
                    md = self._persistence_layer.parameter_metadata[parameter_name]
                    pc = md.parameter_context
                    self._range_dictionary.add_context(pc)
                    s = PersistedStorage(md, self._persistence_layer.brick_dispatcher, dtype=pc.param_type.storage_encoding, fill_value=pc.param_type.fill_value, mode=self.mode, inline_data_writes=inline_data_writes, auto_flush=auto_flush_values)
                    self._range_value[parameter_name] = get_value_class(param_type=pc.param_type, domain_set=pc.dom, storage=s)

            if name is None or parameter_dictionary is None:
                # This appears to be a load
                _doload(self)

            else:
                # This appears to be a new coverage
                # Make sure name and parameter_dictionary are not None
                if name is None or parameter_dictionary is None:
                    raise SystemError('\'name\' and \'parameter_dictionary\' cannot be None')

                # Make sure the specified root_dir exists
                if not in_memory_storage and not os.path.exists(root_dir):
                    raise SystemError('Cannot find specified \'root_dir\': {0}'.format(root_dir))

                # If the coverage directory exists, load it instead!!
                if os.path.exists(pth):
                    log.warn('The specified coverage already exists - performing load of \'{0}\''.format(pth))
                    _doload(self)
                    return

                # We've checked everything we can - this is a new coverage!!!

                # Check the mode - must be in 'a' for a new coverage
                if self.mode != 'a':
                    self.mode = 'a'

                self.name = name
                if temporal_domain is None:
                    self.temporal_domain = GridDomain(GridShape('temporal',[0]), CRS.standard_temporal(), MutabilityEnum.EXTENSIBLE)
                elif isinstance(temporal_domain, AbstractDomain):
                    self.temporal_domain = deepcopy(temporal_domain)
                else:
                    raise TypeError('\'temporal_domain\' must be an instance of AbstractDomain')

                if spatial_domain is None or isinstance(spatial_domain, AbstractDomain):
                    self.spatial_domain = deepcopy(spatial_domain)
                else:
                    raise TypeError('\'spatial_domain\' must be an instance of AbstractDomain')

                if not isinstance(parameter_dictionary, ParameterDictionary):
                    raise TypeError('\'parameter_dictionary\' must be of type ParameterDictionary')
                self._range_dictionary = ParameterDictionary()
                self._range_value = RangeValues()

                self._bricking_scheme = bricking_scheme or {'brick_size':10000,'chunk_size':500}

                self._in_memory_storage = in_memory_storage
                if self._in_memory_storage:
                    self._persistence_layer = InMemoryPersistenceLayer()
                else:
                    self._persistence_layer = PersistenceLayer(root_dir, persistence_guid, name=name, tdom=temporal_domain, sdom=spatial_domain, mode=self.mode, bricking_scheme=self._bricking_scheme, inline_data_writes=inline_data_writes, auto_flush_values=auto_flush_values)

                for o, pc in parameter_dictionary.itervalues():
                    self._append_parameter(pc)
        except:
            self._closed = True
            raise

    @classmethod
    def _fromdict(cls, cmdict, arg_masks=None):
        return super(SimplexCoverage, cls)._fromdict(cmdict, {'parameter_dictionary':'_range_dictionary'})

    @property
    def temporal_parameter_name(self):
        return self._range_dictionary.temporal_parameter_name

    @property
    def parameter_dictionary(self):
        return deepcopy(self._range_dictionary)

    @property
    def persistence_guid(self):
        if isinstance(self._persistence_layer, InMemoryPersistenceLayer):
            return None
        else:
            return self._persistence_layer.guid

    @property
    def persistence_dir(self):
        if isinstance(self._persistence_layer, InMemoryPersistenceLayer):
            return None
        else:
            return self._persistence_layer.master_manager.root_dir

    def append_parameter(self, parameter_context):
        """
        Append a ParameterContext to the coverage

        @deprecated use a ParameterDictionary during construction of the coverage
        """
        log.warn('SimplexCoverage.append_parameter() is deprecated: use a ParameterDictionary during construction of the coverage')
        self._append_parameter(parameter_context)

    def _append_parameter(self, parameter_context):
        """
        Appends a ParameterContext object to the internal set for this coverage.

        A <b>deep copy</b> of the supplied ParameterContext is added to self._range_dictionary.  An AbstractParameterValue of the type
        indicated by ParameterContext.param_type is added to self._range_value.  If the ParameterContext indicates that
        the parameter is a coordinate parameter, it is associated with the indicated axis of the appropriate CRS.

        @param parameter_context    The ParameterContext to append to the coverage <b>as a copy</b>
        @throws StandardError   If the ParameterContext.axis indicates that it is temporal and a temporal parameter
        already exists in the coverage
        """
        if self.closed:
            raise IOError('I/O operation on closed file')

        if self.mode == 'r':
            raise IOError('Coverage not open for writing: mode == \'{0}\''.format(self.mode))

        if not isinstance(parameter_context, ParameterContext):
            raise TypeError('\'parameter_context\' must be an instance of ParameterContext')

        # Create a deep copy of the ParameterContext
        pcontext = deepcopy(parameter_context)

        pname = pcontext.name

        no_sdom = self.spatial_domain is None

        ## Determine the correct array shape

        # Get the parameter variability; assign to VariabilityEnum.NONE if None
        pv=pcontext.variability or VariabilityEnum.NONE
        if no_sdom and pv in (VariabilityEnum.SPATIAL, VariabilityEnum.BOTH):
            log.warn('Provided \'parameter_context\' indicates Spatial variability, but coverage has no Spatial Domain')

        if pv == VariabilityEnum.TEMPORAL: # Only varies in the Temporal Domain
            pcontext.dom = DomainSet(self.temporal_domain.shape.extents, None)
        elif pv == VariabilityEnum.SPATIAL: # Only varies in the Spatial Domain
            pcontext.dom = DomainSet(None, self.spatial_domain.shape.extents)
        elif pv == VariabilityEnum.BOTH: # Varies in both domains
            # If the Spatial Domain is only a single point on a 0d Topology, the parameter's shape is that of the Temporal Domain only
            if no_sdom or (len(self.spatial_domain.shape.extents) == 1 and self.spatial_domain.shape.extents[0] == 0):
                pcontext.dom = DomainSet(self.temporal_domain.shape.extents, None)
            else:
                pcontext.dom = DomainSet(self.temporal_domain.shape.extents, self.spatial_domain.shape.extents)
        elif pv == VariabilityEnum.NONE: # No variance; constant
            # CBM TODO: Not sure we can have this constraint - precludes situations like a TextType with Variability==None...
#            # This is a constant - if the ParameterContext is not a ConstantType, make it one with the default 'x' expr
#            if not isinstance(pcontext.param_type, ConstantType):
#                pcontext.param_type = ConstantType(pcontext.param_type)

            # The domain is the total domain - same value everywhere!!
            # If the Spatial Domain is only a single point on a 0d Topology, the parameter's shape is that of the Temporal Domain only
            if no_sdom or (len(self.spatial_domain.shape.extents) == 1 and self.spatial_domain.shape.extents[0] == 0):
                pcontext.dom = DomainSet(self.temporal_domain.shape.extents, None)
            else:
                pcontext.dom = DomainSet(self.temporal_domain.shape.extents, self.spatial_domain.shape.extents)
        else:
            # Should never get here...but...
            raise SystemError('Must define the variability of the ParameterContext: a member of VariabilityEnum')

        # Assign the pname to the CRS (if applicable) and select the appropriate domain (default is the spatial_domain)
        dom = self.spatial_domain
        if not pcontext.axis is None and AxisTypeEnum.is_member(pcontext.axis, AxisTypeEnum.TIME):
            dom = self.temporal_domain
            dom.crs.axes[pcontext.axis] = pcontext.name
        elif not no_sdom and (pcontext.axis in self.spatial_domain.crs.axes):
            dom.crs.axes[pcontext.axis] = pcontext.name

        self._range_dictionary.add_context(pcontext)
        s = self._persistence_layer.init_parameter(pcontext, self._bricking_scheme)
        self._range_value[pname] = get_value_class(param_type=pcontext.param_type, domain_set=pcontext.dom, storage=s)

    def get_parameter(self, param_name):
        """
        Get a Parameter object by name

        The Parameter object contains the ParameterContext and AbstractParameterValue associated with the param_name

        @param param_name  The local name of the parameter to return
        @returns A Parameter object containing the context and value for the specified parameter
        @throws KeyError    The coverage does not contain a parameter with name 'param_name'
        """
        if self.closed:
            raise ValueError('I/O operation on closed file')

        if param_name in self._range_dictionary:
            p = Parameter(deepcopy(self._range_dictionary.get_context(param_name)), self._range_value[param_name].shape, self._range_value[param_name])
            return p
        else:
            raise KeyError('Coverage does not contain parameter \'{0}\''.format(param_name))

    def list_parameters(self, coords_only=False, data_only=False):
        """
        List the names of the parameters contained in the coverage

        @param coords_only List only the coordinate parameters
        @param data_only   List only the data parameters (non-coordinate) - superseded by coords_only
        @returns A list of parameter names
        """
        if coords_only:
            lst=[x for x, v in self._range_dictionary.iteritems() if v[1].is_coordinate]
        elif data_only:
            lst=[x for x, v in self._range_dictionary.iteritems() if not v[1].is_coordinate]
        else:
            lst=[x for x in self._range_dictionary]
        lst.sort()
        return lst

    def insert_timesteps(self, count, origin=None, oob=True):
        """
        Insert count # of timesteps beginning at the origin

        The specified # of timesteps are inserted into the temporal value array at the indicated origin.  This also
        expands the temporal dimension of the AbstractParameterValue for each parameter.

        @param count    The number of timesteps to insert
        @param origin   The starting location, from which to begin the insertion
        @param oob      Out-of-band flushing: if True, the flush runs in a greenlet; if False, it runs in-band.
        """
        if self.closed:
            raise IOError('I/O operation on closed file')

        if self.mode == 'r':
            raise IOError('Coverage not open for writing: mode == \'{0}\''.format(self.mode))

        # Get the current shape of the temporal_dimension
        shp = self.temporal_domain.shape

        # If not provided, set the origin to the end of the array
        if origin is None or not isinstance(origin, int):
            origin = shp.extents[0]

        # Expand the shape of the temporal_domain - following works if extents is a list or tuple
        shp.extents = (shp.extents[0]+count,)+tuple(shp.extents[1:])

        # Expand the temporal dimension of each of the parameters - the parameter determines how to apply the change
        for n in self._range_dictionary:
            pc = self._range_dictionary.get_context(n)
            # Update the dom of the parameter_context
            if pc.dom.tdom is not None:
                pc.dom.tdom = self.temporal_domain.shape.extents

            self._persistence_layer.expand_domain(pc)
            self._range_value[n].expand_content(VariabilityEnum.TEMPORAL, origin, count)

        # Update the temporal_domain in the master_manager, do NOT flush!!
        self._persistence_layer.update_domain(tdom=self.temporal_domain, do_flush=False)
        # Flush the master_manager & parameter_managers in a separate greenlet
        if oob:
            spawn(self._persistence_layer.flush)
        else:
            self._persistence_layer.flush()

    def set_time_values(self, value, tdoa=None):
        """
        Convenience method for setting time values

        @param value    The value to set
        @param tdoa The temporal DomainOfApplication; default to full Domain
        """
        return self.set_parameter_values(self.temporal_parameter_name, value, tdoa, None)

    def get_time_values(self, tdoa=None, return_value=None):
        """
        Convenience method for retrieving time values

        Delegates to get_parameter_values, supplying the temporal parameter name and sdoa == None
        @param tdoa The temporal DomainOfApplication; default to full Domain
        @param return_value If supplied, filled with response value
        """
        return self.get_parameter_values(self.temporal_parameter_name, tdoa, None, return_value)

    @property
    def num_timesteps(self):
        """
        The current number of timesteps
        """
        return self.temporal_domain.shape.extents[0]

    def set_parameter_values(self, param_name, value, tdoa=None, sdoa=None):
        """
        Assign value to the specified parameter

        Assigns the value to param_name within the coverage.  Temporal and spatial DomainOfApplication objects can be
        applied to constrain the assignment.  See DomainOfApplication for details

        @param param_name   The name of the parameter
        @param value    The value to set
        @param tdoa The temporal DomainOfApplication
        @param sdoa The spatial DomainOfApplication
        @throws KeyError    The coverage does not contain a parameter with name 'param_name'
        """
        if self.closed:
            raise IOError('I/O operation on closed file')

        if self.mode == 'r':
            raise IOError('Coverage not open for writing: mode == \'{0}\''.format(self.mode))

        if not param_name in self._range_value:
            raise KeyError('Parameter \'{0}\' not found in coverage_model'.format(param_name))

        slice_ = []

        tdoa = get_valid_DomainOfApplication(tdoa, self.temporal_domain.shape.extents)
        log.debug('Temporal doa: %s', tdoa.slices)
        slice_.extend(tdoa.slices)

        if self.spatial_domain is not None:
            sdoa = get_valid_DomainOfApplication(sdoa, self.spatial_domain.shape.extents)
            log.debug('Spatial doa: %s', sdoa.slices)
            slice_.extend(sdoa.slices)

        log.debug('Setting slice: %s', slice_)

        self._range_value[param_name][slice_] = value

    def get_parameter_values(self, param_name, tdoa=None, sdoa=None, return_value=None):
        """
        Retrieve the value for a parameter

        Returns the value from param_name.  Temporal and spatial DomainOfApplication objects can be used to
        constrain the response.  See DomainOfApplication for details.

        @param param_name   The name of the parameter
        @param tdoa The temporal DomainOfApplication
        @param sdoa The spatial DomainOfApplication
        @param return_value If supplied, filled with response value - currently via OVERWRITE
        @throws KeyError    The coverage does not contain a parameter with name 'param_name'
        """
        if self.closed:
            raise ValueError('I/O operation on closed file')

        if not param_name in self._range_value:
            raise KeyError('Parameter \'{0}\' not found in coverage'.format(param_name))

        if return_value is not None:
            log.warn('Provided \'return_value\' will be OVERWRITTEN')

        slice_ = []

        tdoa = get_valid_DomainOfApplication(tdoa, self.temporal_domain.shape.extents)
        log.debug('Temporal doa: %s', tdoa.slices)
        slice_.extend(tdoa.slices)

        if self.spatial_domain is not None:
            sdoa = get_valid_DomainOfApplication(sdoa, self.spatial_domain.shape.extents)
            log.debug('Spatial doa: %s', sdoa.slices)
            slice_.extend(sdoa.slices)

        log.debug('Getting slice: %s', slice_)

        return_value = self._range_value[param_name][slice_]
        return return_value

    def get_parameter_context(self, param_name):
        """
        Retrieve a deepcopy of the ParameterContext object for the specified parameter

        @param param_name   The name of the parameter for which to retrieve context
        @returns A deepcopy of the specified ParameterContext object
        @throws KeyError    The coverage does not contain a parameter with name 'param_name'
        """
        if not param_name in self._range_dictionary:
            raise KeyError('Parameter \'{0}\' not found in coverage'.format(param_name))

        return deepcopy(self._range_dictionary.get_context(param_name))

    def __axis_arg_to_params(self, axis=None):
        """
        Helper function to compose a list of parameter names based on the <i>axis</i> argument

        If <i>axis</i> is None, all coordinate parameters are included

        @param axis A member of AxisTypeEnum; may be an iterable of such members
        """
        params = []
        if axis is None:
            params.extend(pn for pk, pn in self.temporal_domain.crs.axes.iteritems())
            params.extend(pn for pk, pn in self.spatial_domain.crs.axes.iteritems())
        elif hasattr(axis, '__iter__'):
            for a in axis:
                if a in self.temporal_domain.crs.axes:
                    params.append(self.temporal_domain.crs.axes[a])
                elif a in self.spatial_domain.crs.axes:
                    params.append(self.spatial_domain.crs.axes[a])
                else:
                    raise ValueError('Specified axis ({0}) not found in coverage'.format(a))
        elif axis in self.temporal_domain.crs.axes:
            params.append(self.temporal_domain.crs.axes[axis])
        elif axis in self.spatial_domain.crs.axes:
            params.append(self.spatial_domain.crs.axes[axis])
        else:
            raise ValueError('Specified axis ({0}) not found in coverage'.format(axis))

        return params

    def __parameter_name_arg_to_params(self, parameter_name=None):
        """
        Helper function to compose a list of parameter names based on the <i>parameter_name</i> argument

        If <i>parameter_name</i> is None, all parameters in the coverage are included

        @param parameter_name A string parameter name; may be an iterable of such members
        """
        params = []
        if parameter_name is None:
            params.extend(self._range_dictionary.keys())
        elif hasattr(parameter_name, '__iter__'):
            params.extend(pn for pn in parameter_name if pn in self._range_dictionary.keys())
        else:
            params.append(parameter_name)

        return params

    def get_data_bounds(self, parameter_name=None):
        """
        Returns the bounds (min, max) for the parameter(s) indicated by <i>parameter_name</i>

        If <i>parameter_name</i> is None, all parameters in the coverage are included

        If more than one parameter is indicated by <i>parameter_name</i>, a dict of {key:(min,max)} is returned;
        otherwise, only the (min, max) tuple is returned

        @param parameter_name   A string parameter name; may be an iterable of such members
        """
        from coverage_model import QuantityType, ConstantType
        ret = {}
        for pn in self.__parameter_name_arg_to_params(parameter_name):
            ctxt = self._range_dictionary.get_context(pn)
            fv = ctxt.fill_value
            if isinstance(ctxt.param_type, QuantityType) or isinstance(ctxt.param_type, ConstantType):
                varr = np.ma.masked_equal(self._range_value[pn][:], fv, copy=False)
                r = (varr.min(), varr.max())
                ret[pn] = tuple([fv if isinstance(x, np.ma.core.MaskedConstant) else x for x in r])
            else:
                # CBM TODO: Sort out if this is an appropriate way to deal with non-numeric types
                ret[pn] = (fv, fv)

        if len(ret) == 1:
            ret = ret.values()[0]

        return ret

    def get_data_bounds_by_axis(self, axis=None):
        """
        Returns the bounds (min, max) for the coordinate parameter(s) indicated by <i>axis</i>

        If <i>axis</i> is None, all coordinate parameters are included

        If more than one parameter is indicated by <i>axis</i>, a dict of {key:(min,max)} is returned;
        otherwise, only the (min, max) tuple is returned

        @param axis   A member of AxisTypeEnum; may be an iterable of such members
        """
        return self.get_data_bounds(self.__axis_arg_to_params(axis))

    def get_data_extents(self, parameter_name=None):
        """
        Returns the extents (dim_0,dim_1,...,dim_n) for the parameter(s) indicated by <i>parameter_name</i>

        If <i>parameter_name</i> is None, all parameters in the coverage are included

        If more than one parameter is indicated by <i>parameter_name</i>, a dict of {key:(dim_0,dim_1,...,dim_n)} is returned;
        otherwise, only the (dim_0,dim_1,...,dim_n) tuple is returned

        @param parameter_name   A string parameter name; may be an iterable of such members
        """
        ret = {}
        for pn in self.__parameter_name_arg_to_params(parameter_name):
            p = self._range_dictionary.get_context(pn)
            ret[pn] = p.dom.total_extents

        if len(ret) == 1:
            ret = ret.values()[0]

        return ret

    def get_data_extents_by_axis(self, axis=None):
        """
        Returns the extents (dim_0,dim_1,...,dim_n) for the coordinate parameter(s) indicated by <i>axis</i>

        If <i>axis</i> is None, all coordinate parameters are included

        If more than one parameter is indicated by <i>axis</i>, a dict of {key:(dim_0,dim_1,...,dim_n)} is returned;
        otherwise, only the (dim_0,dim_1,...,dim_n) tuple is returned

        @param axis   A member of AxisTypeEnum; may be an iterable of such members
        """
        return self.get_data_extents(self.__axis_arg_to_params(axis))

    def get_data_size(self, parameter_name=None, slice_=None, in_bytes=False):
        """
        Returns the size of the <b>data values</b> for the parameter(s) indicated by <i>parameter_name</i>.
        ParameterContext and Coverage metadata is <b>NOT</b> included in the returned size.

        If <i>parameter_name</i> is None, all parameters in the coverage are included

        If more than one parameter is indicated by <i>parameter_name</i>, the sum of the sizes of the indicated parameters is returned

        If <i>slice_</i> is not None, it is applied to each parameter (after being run through utils.fix_slice) before
        calculation of size

        Sizes are calculated as:
            size = itemsize * total_extent_size

        where:
            itemsize == the per-item size based on the data type of the parameter
            total_extent_size == the total number of elements after slicing is applied (if applicable)

        Sizes are in MB unless <i>in_bytes</i> == True

        @param parameter_name   A string parameter name; may be an iterable of such members
        @param slice_   If not None, applied to each parameter before calculation of size
        @param in_bytes If True, returns the size in bytes; otherwise, returns the size in MB (default)
        """
        size = 0
        if parameter_name is None:
            for pn in self._range_dictionary.keys():
                size += self.get_data_size(pn, in_bytes=in_bytes)
            return size

        for pn in self.__parameter_name_arg_to_params(parameter_name):
            p = self._range_dictionary.get_context(pn)
            te=p.dom.total_extents
            dt = np.dtype(p.param_type.value_encoding)

            if slice_ is not None:
                slice_ = utils.fix_slice(slice_, te)
                a=np.empty(te, dtype=dt)[slice_]
                size += a.nbytes
            else:
                size += dt.itemsize * utils.prod(te)

        if not in_bytes:
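            # convert from bytes to megabytes (1 / 1024**2 is approximately 9.53674e-7)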
            size *= 9.53674e-7

        return size

    @property
    def info(self):
        """
        Returns a detailed string representation of the coverage contents
        @returns    string of coverage contents
        """
        lst = []
        indent = ' '
        lst.append('ID: {0}'.format(self._id))
        lst.append('Name: {0}'.format(self.name))
        lst.append('Temporal Domain:\n{0}'.format(self.temporal_domain.__str__(indent*2)))
        lst.append('Spatial Domain:\n{0}'.format(self.spatial_domain.__str__(indent*2)))

        lst.append('Parameters:')
        for x in self._range_value:
            lst.append('{0}{1} {2}\n{3}'.format(indent*2,x,self._range_value[x].shape,self._range_dictionary.get_context(x).__str__(indent*4)))

        return '\n'.join(lst)

    def __str__(self):
        lst = []
        indent = ' '
        lst.append('ID: {0}'.format(self._id))
        lst.append('Name: {0}'.format(self.name))
        lst.append('TemporalDomain: Shape=>{0} Axes=>{1}'.format(self.temporal_domain.shape.extents, self.temporal_domain.crs.axes))
        lst.append('SpatialDomain: Shape=>{0} Axes=>{1}'.format(self.spatial_domain.shape.extents, self.spatial_domain.crs.axes))
        lst.append('Coordinate Parameters: {0}'.format(self.list_parameters(coords_only=True)))
        lst.append('Data Parameters: {0}'.format(self.list_parameters(coords_only=False, data_only=True)))

        return '\n'.join(lst)
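A hedged construction sketch based on the constructor docstring above (the directory, name, and inputs are illustrative; pdict, tdom, and sdom stand for a ParameterDictionary and domains built as in the earlier examples): supplying name and parameter_dictionary creates a new coverage, while omitting them loads an existing one by its persistence GUID.

guid = create_guid()
cov = SimplexCoverage('/tmp/covs', guid, name='demo_cov', parameter_dictionary=pdict,
                      temporal_domain=tdom, spatial_domain=sdom)  # root_dir must already exist
cov.insert_timesteps(10)
reloaded = SimplexCoverage('/tmp/covs', guid)  # load path: name/parameter_dictionary omitted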
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS'

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.name='Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'ruv_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        dset.dataset_description.parameters['base_url'] = 'test_data/ruv/'
        dset.dataset_description.parameters['list_pattern'] = 'RDLi_SEAB_2011_08_24_1600.ruv'
        dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H %M'
        dset.dataset_description.parameters['date_extraction_pattern'] = 'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = []

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='ruv_model')
        dsrc_model.model = 'RUV'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        #create temp streamdef so the data product can create the stream

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dprod = IonObject(RT.DataProduct,
            name='ruv_parsed_product',
            description='parsed ruv product',
            temporal_domain = tdom,
            spatial_domain = sdom)

        streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
                                                    stream_definition_id=streamdef_id,
                                                    parameter_dictionary=parameter_dictionary)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({'ExternalDataset':ds_id, 'ExternalDataProvider':ext_dprov_id, 'DataSource':ext_dsrc_id, 'DataSourceModel':ext_dsrc_model_id, 'DataProducer':dproducer_id, 'DataProduct':dproduct_id, 'Stream':stream_id}))

        #CBM: Use CF standard_names

        #ttool = TaxyTool()
        #
        #ttool.add_taxonomy_set('data','test data')
        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        self.create_stream_and_logger(name='ruv',stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING':True,
            'stream_id':stream_id,
            'external_dataset_res':dset,
            'param_dictionary':pdict.dump(),
            'data_producer_id':dproducer_id,#CBM: Should this be put in the main body of the config - with mod & cls?
            'max_records':20,
        }
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS'

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = "Christopher Mueller"
        dprov.contact.email = "*****@*****.**"

        # Create DataSource
        dsrc = DataSource(protocol_type="FILE", institution=Institution(), contact=ContactInformation())
        dsrc.connection_params["base_data_url"] = ""
        dsrc.contact.name = "Tim Giguere"
        dsrc.contact.email = "*****@*****.**"

        # Create ExternalDataset
        ds_name = "slocum_test_dataset"
        dset = ExternalDataset(
            name=ds_name,
            dataset_description=DatasetDescription(),
            update_description=UpdateDescription(),
            contact=ContactInformation(),
        )

        dset.dataset_description.parameters["base_url"] = "test_data/slocum/"
        dset.dataset_description.parameters["list_pattern"] = "ru05-2012-021-0-0-sbd.dat"
        dset.dataset_description.parameters["date_pattern"] = "%Y %j"
        dset.dataset_description.parameters["date_extraction_pattern"] = "ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat"
        dset.dataset_description.parameters["temporal_dimension"] = None
        dset.dataset_description.parameters["zonal_dimension"] = None
        dset.dataset_description.parameters["meridional_dimension"] = None
        dset.dataset_description.parameters["vertical_dimension"] = None
        dset.dataset_description.parameters["variables"] = [
            "c_wpt_y_lmc",
            "sci_water_cond",
            "m_y_lmc",
            "u_hd_fin_ap_inflection_holdoff",
            "sci_m_present_time",
            "m_leakdetect_voltage_forward",
            "sci_bb3slo_b660_scaled",
            "c_science_send_all",
            "m_gps_status",
            "m_water_vx",
            "m_water_vy",
            "c_heading",
            "sci_fl3slo_chlor_units",
            "u_hd_fin_ap_gain",
            "m_vacuum",
            "u_min_water_depth",
            "m_gps_lat",
            "m_veh_temp",
            "f_fin_offset",
            "u_hd_fin_ap_hardover_holdoff",
            "c_alt_time",
            "m_present_time",
            "m_heading",
            "sci_bb3slo_b532_scaled",
            "sci_fl3slo_cdom_units",
            "m_fin",
            "x_cycle_overrun_in_ms",
            "sci_water_pressure",
            "u_hd_fin_ap_igain",
            "sci_fl3slo_phyco_units",
            "m_battpos",
            "sci_bb3slo_b470_scaled",
            "m_lat",
            "m_gps_lon",
            "sci_ctd41cp_timestamp",
            "m_pressure",
            "c_wpt_x_lmc",
            "c_ballast_pumped",
            "x_lmc_xy_source",
            "m_lon",
            "m_avg_speed",
            "sci_water_temp",
            "u_pitch_ap_gain",
            "m_roll",
            "m_tot_num_inflections",
            "m_x_lmc",
            "u_pitch_ap_deadband",
            "m_final_water_vy",
            "m_final_water_vx",
            "m_water_depth",
            "m_leakdetect_voltage",
            "u_pitch_max_delta_battpos",
            "m_coulomb_amphr",
            "m_pitch",
        ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name="slocum_model")
        dsrc_model.model = "SLOCUM"
        dsrc_model.data_handler_module = "N/A"
        dsrc_model.data_handler_class = "N/A"

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id
        )
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        # create temp streamdef so the data product can create the stream
        streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")

        # Generate the data product and associate it to the ExternalDataset

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dprod = IonObject(
            RT.DataProduct,
            name="slocum_parsed_product",
            description="parsed slocum product",
            temporal_domain=tdom,
            spatial_domain=sdom,
        )

        dproduct_id = dpms_cli.create_data_product(
            data_product=dprod, stream_definition_id=streamdef_id, parameter_dictionary=parameter_dictionary
        )

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(
            subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True
        )
        stream_id = stream_id[0]

        log.info(
            "Created resources: {0}".format(
                {
                    "ExternalDataset": ds_id,
                    "ExternalDataProvider": ext_dprov_id,
                    "DataSource": ext_dsrc_id,
                    "DataSourceModel": ext_dsrc_model_id,
                    "DataProducer": dproducer_id,
                    "DataProduct": dproduct_id,
                    "Stream": stream_id,
                }
            )
        )

        # CBM: Use CF standard_names

        #        ttool = TaxyTool()
        #
        #        ttool.add_taxonomy_set('c_wpt_y_lmc'),
        #        ttool.add_taxonomy_set('sci_water_cond'),
        #        ttool.add_taxonomy_set('m_y_lmc'),
        #        ttool.add_taxonomy_set('u_hd_fin_ap_inflection_holdoff'),
        #        ttool.add_taxonomy_set('sci_m_present_time'),
        #        ttool.add_taxonomy_set('m_leakdetect_voltage_forward'),
        #        ttool.add_taxonomy_set('sci_bb3slo_b660_scaled'),
        #        ttool.add_taxonomy_set('c_science_send_all'),
        #        ttool.add_taxonomy_set('m_gps_status'),
        #        ttool.add_taxonomy_set('m_water_vx'),
        #        ttool.add_taxonomy_set('m_water_vy'),
        #        ttool.add_taxonomy_set('c_heading'),
        #        ttool.add_taxonomy_set('sci_fl3slo_chlor_units'),
        #        ttool.add_taxonomy_set('u_hd_fin_ap_gain'),
        #        ttool.add_taxonomy_set('m_vacuum'),
        #        ttool.add_taxonomy_set('u_min_water_depth'),
        #        ttool.add_taxonomy_set('m_gps_lat'),
        #        ttool.add_taxonomy_set('m_veh_temp'),
        #        ttool.add_taxonomy_set('f_fin_offset'),
        #        ttool.add_taxonomy_set('u_hd_fin_ap_hardover_holdoff'),
        #        ttool.add_taxonomy_set('c_alt_time'),
        #        ttool.add_taxonomy_set('m_present_time'),
        #        ttool.add_taxonomy_set('m_heading'),
        #        ttool.add_taxonomy_set('sci_bb3slo_b532_scaled'),
        #        ttool.add_taxonomy_set('sci_fl3slo_cdom_units'),
        #        ttool.add_taxonomy_set('m_fin'),
        #        ttool.add_taxonomy_set('x_cycle_overrun_in_ms'),
        #        ttool.add_taxonomy_set('sci_water_pressure'),
        #        ttool.add_taxonomy_set('u_hd_fin_ap_igain'),
        #        ttool.add_taxonomy_set('sci_fl3slo_phyco_units'),
        #        ttool.add_taxonomy_set('m_battpos'),
        #        ttool.add_taxonomy_set('sci_bb3slo_b470_scaled'),
        #        ttool.add_taxonomy_set('m_lat'),
        #        ttool.add_taxonomy_set('m_gps_lon'),
        #        ttool.add_taxonomy_set('sci_ctd41cp_timestamp'),
        #        ttool.add_taxonomy_set('m_pressure'),
        #        ttool.add_taxonomy_set('c_wpt_x_lmc'),
        #        ttool.add_taxonomy_set('c_ballast_pumped'),
        #        ttool.add_taxonomy_set('x_lmc_xy_source'),
        #        ttool.add_taxonomy_set('m_lon'),
        #        ttool.add_taxonomy_set('m_avg_speed'),
        #        ttool.add_taxonomy_set('sci_water_temp'),
        #        ttool.add_taxonomy_set('u_pitch_ap_gain'),
        #        ttool.add_taxonomy_set('m_roll'),
        #        ttool.add_taxonomy_set('m_tot_num_inflections'),
        #        ttool.add_taxonomy_set('m_x_lmc'),
        #        ttool.add_taxonomy_set('u_pitch_ap_deadband'),
        #        ttool.add_taxonomy_set('m_final_water_vy'),
        #        ttool.add_taxonomy_set('m_final_water_vx'),
        #        ttool.add_taxonomy_set('m_water_depth'),
        #        ttool.add_taxonomy_set('m_leakdetect_voltage'),
        #        ttool.add_taxonomy_set('u_pitch_max_delta_battpos'),
        #        ttool.add_taxonomy_set('m_coulomb_amphr'),
        #        ttool.add_taxonomy_set('m_pitch'),

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext("c_wpt_y_lmc", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("sci_water_cond", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_y_lmc", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "u_hd_fin_ap_inflection_holdoff", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("sci_m_present_time", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "m_leakdetect_voltage_forward", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "sci_bb3slo_b660_scaled", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("c_science_send_all", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_gps_status", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_water_vx", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_water_vy", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("c_heading", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "sci_fl3slo_chlor_units", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("u_hd_fin_ap_gain", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_vacuum", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("u_min_water_depth", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_gps_lat", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_veh_temp", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("f_fin_offset", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "u_hd_fin_ap_hardover_holdoff", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("c_alt_time", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_present_time", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_heading", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "sci_bb3slo_b532_scaled", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "sci_fl3slo_cdom_units", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_fin", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "x_cycle_overrun_in_ms", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("sci_water_pressure", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("u_hd_fin_ap_igain", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "sci_fl3slo_phyco_units", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_battpos", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "sci_bb3slo_b470_scaled", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_lat", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_gps_lon", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "sci_ctd41cp_timestamp", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_pressure", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("c_wpt_x_lmc", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("c_ballast_pumped", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("x_lmc_xy_source", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_lon", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_avg_speed", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("sci_water_temp", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("u_pitch_ap_gain", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_roll", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "m_tot_num_inflections", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_x_lmc", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("u_pitch_ap_deadband", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_final_water_vy", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_final_water_vx", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_water_depth", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "m_leakdetect_voltage", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext(
            "u_pitch_max_delta_battpos", param_type=QuantityType(value_encoding=numpy.dtype("float32"))
        )
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_coulomb_amphr", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext("m_pitch", param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        t_ctxt.uom = "unknown"
        pdict.add_context(t_ctxt)
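        # Note: the repeated context definitions above could equivalently be generated in a loop
        # (a sketch, not the original author's approach):
        #   for name in dset.dataset_description.parameters["variables"]:
        #       ctxt = ParameterContext(name, param_type=QuantityType(value_encoding=numpy.dtype("float32")))
        #       ctxt.uom = "unknown"
        #       pdict.add_context(ctxt)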

        # CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        self.create_stream_and_logger(name="slocum", stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG["dh_cfg"] = {
            "TESTING": True,
            "stream_id": stream_id,
            "external_dataset_res": dset,
            "param_dictionary": pdict.dump(),
            "data_producer_id": dproducer_id,  # CBM: Should this be put in the main body of the config - with mod & cls?
            "max_records": 20,
        }
    def _create_parameter_dictionary(self):
        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('c_wpt_y_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_water_cond', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_y_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_hd_fin_ap_inflection_holdoff', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_m_present_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_leakdetect_voltage_forward', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_bb3slo_b660_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_science_send_all', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_gps_status', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_water_vx', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_water_vy', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_heading', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_fl3slo_chlor_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_hd_fin_ap_gain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_vacuum', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_min_water_depth', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_gps_lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_veh_temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('f_fin_offset', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_hd_fin_ap_hardover_holdoff', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_alt_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_present_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_heading', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_bb3slo_b532_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_fl3slo_cdom_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_fin', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('x_cycle_overrun_in_ms', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_water_pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_hd_fin_ap_igain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_fl3slo_phyco_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_battpos', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_bb3slo_b470_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_gps_lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_ctd41cp_timestamp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_wpt_x_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('c_ballast_pumped', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('x_lmc_xy_source', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_avg_speed', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('sci_water_temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_pitch_ap_gain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_roll', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_tot_num_inflections', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_x_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_pitch_ap_deadband', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_final_water_vy', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_final_water_vx', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_water_depth', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_leakdetect_voltage', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('u_pitch_max_delta_battpos', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_coulomb_amphr', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        t_ctxt = ParameterContext('m_pitch', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        t_ctxt.uom = 'unknown'
        pdict.add_context(t_ctxt)

        return pdict
    def _create_parameter_dictionary(self):
        pdict = ParameterDictionary()

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('water_temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_bottom', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_middle', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('z', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'meters'
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('streamflow', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('specific_conductance', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        pres_ctxt = ParameterContext('data_qualifier', param_type=QuantityType(value_encoding=numpy.dtype('bool')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        return pdict
Ejemplo n.º 22
0
class ctd_L0_all(TransformDataProcess):
    """Model for a TransformDataProcess

    """

    incoming_stream_def = SBE37_CDM_stream_definition()

    def __init__(self):

        super(ctd_L0_all, self).__init__()

        # Make the stream definitions class attributes of the transform

        #outgoing_stream_pressure = L0_pressure_stream_definition()
        #outgoing_stream_temperature = L0_temperature_stream_definition()
        #outgoing_stream_conductivity = L0_conductivity_stream_definition()

        ### Taxonomies are defined beforehand, out of band... somehow.
    #    pres = TaxyTool()
    #    pres.add_taxonomy_set('pres','long name for pres')
    #    pres.add_taxonomy_set('lat','long name for latitude')
    #    pres.add_taxonomy_set('lon','long name for longitude')
    #    pres.add_taxonomy_set('height','long name for height')
    #    pres.add_taxonomy_set('time','long name for time')
    #    # This is an example of using groups it is not a normative statement about how to use groups
    #    pres.add_taxonomy_set('coordinates','This group contains coordinates...')
    #    pres.add_taxonomy_set('data','This group contains data...')
    #
    #    temp = TaxyTool()
    #    temp.add_taxonomy_set('temp','long name for temp')
    #    temp.add_taxonomy_set('lat','long name for latitude')
    #    temp.add_taxonomy_set('lon','long name for longitude')
    #    temp.add_taxonomy_set('height','long name for height')
    #    temp.add_taxonomy_set('time','long name for time')
    #    # This is an example of using groups it is not a normative statement about how to use groups
    #    temp.add_taxonomy_set('coordinates','This group contains coordinates...')
    #    temp.add_taxonomy_set('data','This group contains data...')
    #
    #    coord = TaxyTool()
    #    coord.add_taxonomy_set('cond','long name for cond')
    #    coord.add_taxonomy_set('lat','long name for latitude')
    #    coord.add_taxonomy_set('lon','long name for longitude')
    #    coord.add_taxonomy_set('height','long name for height')
    #    coord.add_taxonomy_set('time','long name for time')
    #    # This is an example of using groups it is not a normative statement about how to use groups
    #    coord.add_taxonomy_set('coordinates','This group contains coordinates...')
    #    coord.add_taxonomy_set('data','This group contains data...')

        ### Parameter dictionaries
        self.defining_parameter_dictionary()

        self.publisher = Publisher(to_name=NameTrio(get_sys_name(), str(uuid.uuid4())[0:6]))

    def defining_parameter_dictionary(self):

        # Define the parameter context objects

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0

        height_ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=np.float32))
        height_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        height_ctxt.uom = 'meters'
        height_ctxt.fill_value = 0e0

        pres_ctxt = ParameterContext('pres', param_type=QuantityType(value_encoding=np.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0e0

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0

        cond_ctxt = ParameterContext('cond', param_type=QuantityType(value_encoding=np.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0

        # Define the parameter dictionary objects

        self.pres = ParameterDictionary()
        self.pres.add_context(t_ctxt)
        self.pres.add_context(lat_ctxt)
        self.pres.add_context(lon_ctxt)
        self.pres.add_context(height_ctxt)
        self.pres.add_context(pres_ctxt)
        self.pres.add_context(data_ctxt)

        self.temp = ParameterDictionary()
        self.temp.add_context(t_ctxt)
        self.temp.add_context(lat_ctxt)
        self.temp.add_context(lon_ctxt)
        self.temp.add_context(height_ctxt)
        self.temp.add_context(temp_ctxt)
        self.temp.add_context(data_ctxt)

        self.cond = ParameterDictionary()
        self.cond.add_context(t_ctxt)
        self.cond.add_context(lat_ctxt)
        self.cond.add_context(lon_ctxt)
        self.cond.add_context(height_ctxt)
        self.cond.add_context(cond_ctxt)
        self.cond.add_context(data_ctxt)

    def process(self, packet):

        """Processes incoming data!!!!
        """

        # Use the PointSupplementStreamParser to pull data from a granule
        #psd = PointSupplementStreamParser(stream_definition=self.incoming_stream_def, stream_granule=packet)
        rdt = RecordDictionaryTool.load_from_granule(packet)
        #todo: use only flat dicts for now, may change later...
#        rdt0 = rdt['coordinates']
#        rdt1 = rdt['data']

        conductivity = get_safe(rdt, 'cond') #psd.get_values('conductivity')
        pressure = get_safe(rdt, 'pres') #psd.get_values('pressure')
        temperature = get_safe(rdt, 'temp') #psd.get_values('temperature')

        longitude = get_safe(rdt, 'lon') # psd.get_values('longitude')
        latitude = get_safe(rdt, 'lat')  #psd.get_values('latitude')
        time = get_safe(rdt, 'time') # psd.get_values('time')
        height = get_safe(rdt, 'height') # psd.get_values('height')

        log.warn('Got conductivity: %s' % str(conductivity))
        log.warn('Got pressure: %s' % str(pressure))
        log.warn('Got temperature: %s' % str(temperature))

        g = self._build_granule_settings(self.cond, 'cond', conductivity, time, latitude, longitude, height)

        # publish a granule
        self.cond_publisher = self.publisher
        self.cond_publisher.publish(g)

        g = self._build_granule_settings(self.temp, 'temp', temperature, time, latitude, longitude, height)

        # publish a granule
        self.temp_publisher = self.publisher
        self.temp_publisher.publish(g)

        g = self._build_granule_settings(self.pres, 'pres', pressure, time, latitude, longitude, height)

        # publish a granule
        self.pres_publisher = self.publisher
        self.pres_publisher.publish(g)

    def _build_granule_settings(self, param_dictionary=None, field_name='', value=None, time=None, latitude=None, longitude=None, height=None):

        root_rdt = RecordDictionaryTool(param_dictionary=param_dictionary)

        #data_rdt = RecordDictionaryTool(taxonomy=taxonomy)

        root_rdt[field_name] = value

        #coor_rdt = RecordDictionaryTool(taxonomy=taxonomy)

        root_rdt['time'] = time
        root_rdt['lat'] = latitude
        root_rdt['lon'] = longitude
        root_rdt['height'] = height

        #todo: use only flat dicts for now, may change later...
#        root_rdt['coordinates'] = coor_rdt
#        root_rdt['data'] = data_rdt

        log.debug("ctd_L0_all:_build_granule_settings: logging published Record Dictionary:\n %s", str(root_rdt.pretty_print()))

        return build_granule(data_producer_id='ctd_L0', param_dictionary=param_dictionary, record_dictionary=root_rdt)
Ejemplo n.º 23
0
    def create_parameters(cls):
        '''
        WARNING: This method is a wrapper intended only for tests, it should not be used in production code.
        It probably will not align to most datasets.
        '''
        pdict = ParameterDictionary()
        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=np.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0
        pdict.add_context(data_ctxt)

        pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=np.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        sal_ctxt = ParameterContext('salinity', param_type=QuantityType(value_encoding=np.float32))
        sal_ctxt.uom = 'PSU'
        sal_ctxt.fill_value = 0x0
        pdict.add_context(sal_ctxt)

        dens_ctxt = ParameterContext('density', param_type=QuantityType(value_encoding=np.float32))
        dens_ctxt.uom = 'unknown'
        dens_ctxt.fill_value = 0x0
        pdict.add_context(dens_ctxt)

        return pdict
Ejemplo n.º 24
0
    def create_parameters(cls):
        '''
        WARNING: This method is a wrapper intended only for tests, it should not be used in production code.
        It probably will not align to most datasets.
        '''
        pdict = ParameterDictionary()
        t_ctxt = ParameterContext(
            'time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext(
            'lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext(
            'lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext(
            'temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext(
            'conductivity', param_type=QuantityType(value_encoding=np.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        data_ctxt = ParameterContext(
            'data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0
        pdict.add_context(data_ctxt)

        pres_ctxt = ParameterContext(
            'pressure', param_type=QuantityType(value_encoding=np.float32))
        pres_ctxt.uom = 'Pascal'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        sal_ctxt = ParameterContext(
            'salinity', param_type=QuantityType(value_encoding=np.float32))
        sal_ctxt.uom = 'PSU'
        sal_ctxt.fill_value = 0x0
        pdict.add_context(sal_ctxt)

        dens_ctxt = ParameterContext(
            'density', param_type=QuantityType(value_encoding=np.float32))
        dens_ctxt.uom = 'unknown'
        dens_ctxt.fill_value = 0x0
        pdict.add_context(dens_ctxt)

        return pdict
Ejemplo n.º 25
0
    def create_parameters(cls):
        pdict = ParameterDictionary()
        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0
        pdict.add_context(lon_ctxt)

        depth_ctxt = ParameterContext('depth', param_type=QuantityType(value_encoding=np.float32))
        depth_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        depth_ctxt.uom = 'meters'
        depth_ctxt.fill_value = 0e0
        pdict.add_context(depth_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=np.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0
        pdict.add_context(data_ctxt)

        return pdict
    def _create_input_param_dict_for_test(self, parameter_dict_name = ''):

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('float64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1900'
        pdict.add_context(t_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'Siemens_per_meter'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'Pascal'
        pdict.add_context(pres_ctxt)

        if parameter_dict_name == 'input_param_for_L0':
            temp_ctxt = ParameterContext('temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        else:
            temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))

        temp_ctxt.uom = 'degree_kelvin'
        pdict.add_context(temp_ctxt)

        dens_ctxt = ParameterContext('density', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        dens_ctxt.uom = 'g/m'
        pdict.add_context(dens_ctxt)

        sal_ctxt = ParameterContext('salinity', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        sal_ctxt.uom = 'PSU'
        pdict.add_context(sal_ctxt)

        # register the parameter contexts and assemble them into a parameter dictionary resource for the test
        pc_list = []
        for pc_k, pc in pdict.iteritems():
            ctxt_id = self.dataset_management.create_parameter_context(pc_k, pc[1].dump())
            pc_list.append(ctxt_id)
            if parameter_dict_name == 'input_param_for_L0':
                self.addCleanup(self.dataset_management.delete_parameter_context,ctxt_id)
            elif pc[1].name == 'temp':
                self.addCleanup(self.dataset_management.delete_parameter_context,ctxt_id)

        pdict_id = self.dataset_management.create_parameter_dictionary(parameter_dict_name, pc_list)
        self.addCleanup(self.dataset_management.delete_parameter_dictionary, pdict_id)

        return pdict_id
Ejemplo n.º 27
0
class CTDL1TemperatureTransform(TransformFunction):
    ''' A basic transform that receives input through a subscription,
    parses the input from a CTD, extracts the temperature value and scales it according to
    the defined algorithm. If the transform has an output_stream, it publishes the output
    on that stream.

    '''

    # Make the stream definitions class attributes of the transform... best available option I can think of?
    incoming_stream_def = L0_temperature_stream_definition()
    outgoing_stream_def = L1_temperature_stream_definition()


    def __init__(self):

        ### Parameter dictionaries
        self.defining_parameter_dictionary()

    def defining_parameter_dictionary(self):

        # Define the parameter context objects

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0

        height_ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=np.float32))
        height_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        height_ctxt.uom = 'meters'
        height_ctxt.fill_value = 0e0

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.float32))
        temp_ctxt.uom = 'degree_Celsius'
        temp_ctxt.fill_value = 0e0


        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0

        # Define the parameter dictionary objects

        self.temp = ParameterDictionary()
        self.temp.add_context(t_ctxt)
        self.temp.add_context(lat_ctxt)
        self.temp.add_context(lon_ctxt)
        self.temp.add_context(height_ctxt)
        self.temp.add_context(temp_ctxt)
        self.temp.add_context(data_ctxt)

    def execute(self, granule):
        """Processes incoming data!!!!
        """

        rdt = RecordDictionaryTool.load_from_granule(granule)
        #todo: use only flat dicts for now, may change later...
#        rdt0 = rdt['coordinates']
#        rdt1 = rdt['data']

        temperature = get_safe(rdt, 'temp')

        longitude = get_safe(rdt, 'lon')
        latitude = get_safe(rdt, 'lat')
        time = get_safe(rdt, 'time')
        height = get_safe(rdt, 'height')

        log.warn('Got temperature: %s' % str(temperature))


        # The L1 temperature data product algorithm takes the L0 temperature data product and converts it into Celsius.
        # Once the hexadecimal string is converted to decimal, only scaling (dividing by a factor and adding an offset) is
        # required to produce the correct decimal representation of the data in Celsius.
        # The scaling function differs by CTD make/model as described below.
        #    SBE 37IM, Output Format 0
        #    1) Standard conversion from 5-character hex string (Thex) to decimal (tdec)
        #    2) Scaling: T [C] = (tdec / 10,000) - 10
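        #    Worked example (hypothetical reading): Thex = '3D090' -> tdec = int('3D090', 16) = 250000
        #                                           -> T [C] = (250000 / 10000.0) - 10 = 15.0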

        root_rdt = RecordDictionaryTool(param_dictionary=self.temp)

        #todo: use only flat dicts for now, may change later...
#        data_rdt = RecordDictionaryTool(taxonomy=self.tx)
#        coord_rdt = RecordDictionaryTool(taxonomy=self.tx)

        scaled_temperature = temperature

        for i in xrange(len(temperature)):
            scaled_temperature[i] = ( temperature[i] / 10000.0) - 10

        root_rdt['temp'] = scaled_temperature
        root_rdt['time'] = time
        root_rdt['lat'] = latitude
        root_rdt['lon'] = longitude
        root_rdt['height'] = height

        #todo: use only flat dicts for now, may change later...
#        root_rdt['coordinates'] = coord_rdt
#        root_rdt['data'] = data_rdt

        return build_granule(data_producer_id='ctd_L1_temperature', param_dictionary=self.temp, record_dictionary=root_rdt)
Ejemplo n.º 28
0
class SimplexCoverage(AbstractCoverage):
    """
    A concrete implementation of AbstractCoverage consisting of 2 domains (temporal and spatial)
    and a collection of parameters associated with one or both of the domains.  Each parameter is defined by a
    ParameterContext object (provided via the ParameterDictionary) and has content represented by a concrete implementation
    of the AbstractParameterValue class.

    """
    def __init__(self, root_dir, persistence_guid, name=None, parameter_dictionary=None, temporal_domain=None, spatial_domain=None, in_memory_storage=False, bricking_scheme=None):
        """
        Constructor for SimplexCoverage

        @param root_dir The root directory for storage of this coverage
        @param persistence_guid The persistence uuid for this coverage
        @param name The name of the coverage
        @param parameter_dictionary    a ParameterDictionary object expected to contain one or more valid ParameterContext objects
        @param spatial_domain  a concrete instance of AbstractDomain for the spatial domain component
        @param temporal_domain a concrete instance of AbstractDomain for the temporal domain component
        """

        # Make sure root_dir and persistence_guid are both not None and are strings
        if not isinstance(root_dir, str) or not isinstance(persistence_guid, str):
            raise SystemError('\'root_dir\' and \'persistence_guid\' must be instances of str')

        pth=os.path.join(root_dir, persistence_guid)

        def _doload(self):
            # Make sure the coverage directory exists
            if not os.path.exists(pth):
                raise SystemError('Cannot find specified coverage: {0}'.format(pth))

            # All appears well - load it up!
            self._persistence_layer = PersistenceLayer(root_dir, persistence_guid)

            self.name = self._persistence_layer.name
            self.spatial_domain = self._persistence_layer.sdom
            self.temporal_domain = self._persistence_layer.tdom

            self._range_dictionary = ParameterDictionary()
            self._range_value = RangeValues()

            self._bricking_scheme = self._persistence_layer.global_bricking_scheme
            self._temporal_param_name = self._persistence_layer.temporal_param_name

            self._in_memory_storage = False

            from coverage_model.persistence import PersistedStorage
            for parameter_name in self._persistence_layer.parameter_metadata.keys():
                md = self._persistence_layer.parameter_metadata[parameter_name]
                pc = md.parameter_context
                self._range_dictionary.add_context(pc)
                s = PersistedStorage(md, self._persistence_layer.brick_dispatcher, dtype=pc.param_type.value_encoding, fill_value=pc.param_type.fill_value)
                self._range_value[parameter_name] = get_value_class(param_type=pc.param_type, domain_set=pc.dom, storage=s)


        AbstractCoverage.__init__(self)
        if name is None or parameter_dictionary is None:
            # This appears to be a load
            _doload(self)

        else:
            # This appears to be a new coverage
            # Make sure name and parameter_dictionary are not None
            if name is None or parameter_dictionary is None:
                raise SystemError('\'name\' and \'parameter_dictionary\' cannot be None')

            # Make sure the specified root_dir exists
            if not in_memory_storage and not os.path.exists(root_dir):
                raise SystemError('Cannot find specified \'root_dir\': {0}'.format(root_dir))

            # If the coverage directory exists, load it instead!!
            if os.path.exists(pth):
                log.warn('The specified coverage already exists - performing load of \'{0}\''.format(pth))
                _doload(self)
                return

            self.name = name
            self.spatial_domain = deepcopy(spatial_domain)
            self.temporal_domain = deepcopy(temporal_domain) or GridDomain(GridShape('temporal',[0]), CRS.standard_temporal(), MutabilityEnum.EXTENSIBLE)

            if not isinstance(parameter_dictionary, ParameterDictionary):
                raise TypeError('\'parameter_dictionary\' must be of type ParameterDictionary')
            self._range_dictionary = ParameterDictionary()
            self._range_value = RangeValues()

            self._bricking_scheme = bricking_scheme or {'brick_size':10,'chunk_size':5}
            self._temporal_param_name = None

            self._in_memory_storage = in_memory_storage
            if self._in_memory_storage:
                self._persistence_layer = InMemoryPersistenceLayer()
            else:
                self._persistence_layer = PersistenceLayer(root_dir, persistence_guid, name=name, tdom=temporal_domain, sdom=spatial_domain, bricking_scheme=self._bricking_scheme)

            for o, pc in parameter_dictionary.itervalues():
                self._append_parameter(pc)
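
    # Construction doubles as load: supplying only 'root_dir' and 'persistence_guid'
    # (or pointing at a coverage directory that already exists) re-opens the persisted
    # coverage instead of creating a new one.  A minimal sketch, assuming 'pdict' is a
    # ParameterDictionary and 'tdom'/'sdom' are Grid domains built elsewhere:
    #
    #   guid = create_guid()
    #   cov = SimplexCoverage('./test_data', guid, 'sample coverage',
    #                         parameter_dictionary=pdict,
    #                         temporal_domain=tdom, spatial_domain=sdom)
    #   same_cov = SimplexCoverage('./test_data', guid)   # load path of the constructor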

    @classmethod
    def _fromdict(cls, cmdict, arg_masks=None):
        return super(SimplexCoverage, cls)._fromdict(cmdict, {'parameter_dictionary':'_range_dictionary'})

    @property
    def parameter_dictionary(self):
        return deepcopy(self._range_dictionary)

    @property
    def persistence_guid(self):
        if isinstance(self._persistence_layer, InMemoryPersistenceLayer):
            return None
        else:
            return self._persistence_layer.guid

    def append_parameter(self, parameter_context):
        """
        Append a ParameterContext to the coverage

        @deprecated use a ParameterDictionary during construction of the coverage
        """
        log.warn('SimplexCoverage.append_parameter() is deprecated: use a ParameterDictionary during construction of the coverage')
        self._append_parameter(parameter_context)

    def _append_parameter(self, parameter_context):
        """
        Appends a ParameterContext object to the internal set for this coverage.

        A <b>deep copy</b> of the supplied ParameterContext is added to self._range_dictionary.  An AbstractParameterValue of the type
        indicated by ParameterContext.param_type is added to self._range_value.  If the ParameterContext indicates that
        the parameter is a coordinate parameter, it is associated with the indicated axis of the appropriate CRS.

        @param parameter_context    The ParameterContext to append to the coverage <b>as a copy</b>
        @throws StandardError   If the ParameterContext.axis indicates that it is temporal and a temporal parameter
        already exists in the coverage
        """
        if not isinstance(parameter_context, ParameterContext):
            raise TypeError('\'parameter_context\' must be an instance of ParameterContext')

        # Create a deep copy of the ParameterContext
        pcontext = deepcopy(parameter_context)

        pname = pcontext.name

        no_sdom = self.spatial_domain is None

        ## Determine the correct array shape

        # Get the parameter variability; assign to VariabilityEnum.NONE if None
        pv=pcontext.variability or VariabilityEnum.NONE
        if no_sdom and pv in (VariabilityEnum.SPATIAL, VariabilityEnum.BOTH):
            log.warn('Provided \'parameter_context\' indicates Spatial variability, but coverage has no Spatial Domain')

        if pv == VariabilityEnum.TEMPORAL: # Only varies in the Temporal Domain
            pcontext.dom = DomainSet(self.temporal_domain.shape.extents, None)
        elif pv == VariabilityEnum.SPATIAL: # Only varies in the Spatial Domain
            pcontext.dom = DomainSet(None, self.spatial_domain.shape.extents)
        elif pv == VariabilityEnum.BOTH: # Varies in both domains
            # If the Spatial Domain is only a single point on a 0d Topology, the parameter's shape is that of the Temporal Domain only
            if no_sdom or (len(self.spatial_domain.shape.extents) == 1 and self.spatial_domain.shape.extents[0] == 0):
                pcontext.dom = DomainSet(self.temporal_domain.shape.extents, None)
            else:
                pcontext.dom = DomainSet(self.temporal_domain.shape.extents, self.spatial_domain.shape.extents)
        elif pv == VariabilityEnum.NONE: # No variance; constant
            # CBM TODO: Not sure we can have this constraint - precludes situations like a TextType with Variablity==None...
#            # This is a constant - if the ParameterContext is not a ConstantType, make it one with the default 'x' expr
#            if not isinstance(pcontext.param_type, ConstantType):
#                pcontext.param_type = ConstantType(pcontext.param_type)

            # The domain is the total domain - same value everywhere!!
            # If the Spatial Domain is only a single point on a 0d Topology, the parameter's shape is that of the Temporal Domain only
            if no_sdom or (len(self.spatial_domain.shape.extents) == 1 and self.spatial_domain.shape.extents[0] == 0):
                pcontext.dom = DomainSet(self.temporal_domain.shape.extents, None)
            else:
                pcontext.dom = DomainSet(self.temporal_domain.shape.extents, self.spatial_domain.shape.extents)
        else:
            # Should never get here...but...
            raise SystemError('Must define the variability of the ParameterContext: a member of VariabilityEnum')

        # Assign the pname to the CRS (if applicable) and select the appropriate domain (default is the spatial_domain)
        dom = self.spatial_domain
        is_tparam = False
        if pcontext.reference_frame is not None and AxisTypeEnum.is_member(pcontext.reference_frame, AxisTypeEnum.TIME):
            if self._temporal_param_name is None:
                self._temporal_param_name = pname
                is_tparam = True
            else:
                raise StandardError("temporal_parameter already defined.")
            dom = self.temporal_domain
            dom.crs.axes[pcontext.reference_frame] = pcontext.name
        elif not no_sdom and (pcontext.reference_frame in self.spatial_domain.crs.axes):
            dom.crs.axes[pcontext.reference_frame] = pcontext.name

        self._range_dictionary.add_context(pcontext)
        s = self._persistence_layer.init_parameter(pcontext, self._bricking_scheme, is_temporal_param=is_tparam)
        self._range_value[pname] = get_value_class(param_type=pcontext.param_type, domain_set=pcontext.dom, storage=s)

    def get_parameter(self, param_name):
        """
        Get a Parameter object by name

        The Parameter object contains the ParameterContext and AbstractParameterValue associated with the param_name

        @param param_name  The local name of the parameter to return
        @returns A Parameter object containing the context and value for the specified parameter
        @throws KeyError    The coverage does not contain a parameter with name 'param_name'
        """
        if param_name in self._range_dictionary:
            p = Parameter(self._range_dictionary.get_context(param_name), self._range_value[param_name].shape, self._range_value[param_name])
            return p
        else:
            raise KeyError('Coverage does not contain parameter \'{0}\''.format(param_name))

    def list_parameters(self, coords_only=False, data_only=False):
        """
        List the names of the parameters contained in the coverage

        @param coords_only List only the coordinate parameters
        @param data_only   List only the data parameters (non-coordinate) - superseded by coords_only
        @returns A list of parameter names
        """
        if coords_only:
            lst=[x for x, v in self._range_dictionary.iteritems() if v[1].is_coordinate]
        elif data_only:
            lst=[x for x, v in self._range_dictionary.iteritems() if not v[1].is_coordinate]
        else:
            lst=[x for x in self._range_dictionary]
        lst.sort()
        return lst

    def insert_timesteps(self, count, origin=None):
        """
        Insert count # of timesteps beginning at the origin

        The specified # of timesteps are inserted into the temporal value array at the indicated origin.  This also
        expands the temporal dimension of the AbstractParameterValue for each parameter

        @param count    The number of timesteps to insert
        @param origin   The starting location from which to begin the insertion
        """

        # Get the current shape of the temporal_dimension
        shp = self.temporal_domain.shape

        # If not provided (or not an int), set the origin to the end of the array
        if origin is None or not isinstance(origin, int):
            origin = shp.extents[0]

        # Expand the shape of the temporal_domain - following works if extents is a list or tuple
        shp.extents = (shp.extents[0]+count,)+tuple(shp.extents[1:])

        # Expand the temporal dimension of each of the parameters - the parameter determines how to apply the change
        for n in self._range_dictionary:
            pc = self._range_dictionary.get_context(n)
            # Update the dom of the parameter_context
            if pc.dom.tdom is not None:
                pc.dom.tdom = self.temporal_domain.shape.extents

            self._persistence_layer.expand_domain(pc, tdom=self.temporal_domain)
            self._range_value[n].expand_content(VariabilityEnum.TEMPORAL, origin, count)

    def set_time_values(self, value, tdoa):
        """
        Convenience method for setting time values

        @param value    The value to set
        @param tdoa The temporal DomainOfApplication; default to full Domain
        """
        return self.set_parameter_values(self._temporal_param_name, value, tdoa, None)

    def get_time_values(self, tdoa=None, return_value=None):
        """
        Convenience method for retrieving time values

        Delegates to get_parameter_values, supplying the temporal parameter name and sdoa == None
        @param tdoa The temporal DomainOfApplication; default to full Domain
        @param return_value If supplied, filled with response value
        """
        return self.get_parameter_values(self._temporal_param_name, tdoa, None, return_value)

    @property
    def num_timesteps(self):
        """
        The current number of timesteps
        """
        return self.temporal_domain.shape.extents[0]

    def set_parameter_values(self, param_name, value, tdoa=None, sdoa=None):
        """
        Assign value to the specified parameter

        Assigns the value to param_name within the coverage.  Temporal and spatial DomainOfApplication objects can be
        applied to constrain the assignment.  See DomainOfApplication for details

        @param param_name   The name of the parameter
        @param value    The value to set
        @param tdoa The temporal DomainOfApplication
        @param sdoa The spatial DomainOfApplication
        @throws KeyError    The coverage does not contain a parameter with name 'param_name'
        """
        if param_name not in self._range_value:
            raise KeyError('Parameter \'{0}\' not found in coverage_model'.format(param_name))

        slice_ = []

        tdoa = get_valid_DomainOfApplication(tdoa, self.temporal_domain.shape.extents)
        log.debug('Temporal doa: %s', tdoa.slices)
        slice_.extend(tdoa.slices)

        if self.spatial_domain is not None:
            sdoa = get_valid_DomainOfApplication(sdoa, self.spatial_domain.shape.extents)
            log.debug('Spatial doa: %s', sdoa.slices)
            slice_.extend(sdoa.slices)

        log.debug('Setting slice: %s', slice_)

        self._range_value[param_name][slice_] = value

    def get_parameter_values(self, param_name, tdoa=None, sdoa=None, return_value=None):
        """
        Retrieve the value for a parameter

        Returns the value from param_name.  Temporal and spatial DomainOfApplication objects can be used to
        constrain the response.  See DomainOfApplication for details.

        @param param_name   The name of the parameter
        @param tdoa The temporal DomainOfApplication
        @param sdoa The spatial DomainOfApplication
        @param return_value If supplied, filled with response value - currently via OVERWRITE
        @throws KeyError    The coverage does not contain a parameter with name 'param_name'
        """
        if param_name not in self._range_value:
            raise KeyError('Parameter \'{0}\' not found in coverage'.format(param_name))

        if return_value is not None:
            log.warn('Provided \'return_value\' will be OVERWRITTEN')

        slice_ = []

        tdoa = get_valid_DomainOfApplication(tdoa, self.temporal_domain.shape.extents)
        log.debug('Temporal doa: %s', tdoa.slices)
        slice_.extend(tdoa.slices)

        if self.spatial_domain is not None:
            sdoa = get_valid_DomainOfApplication(sdoa, self.spatial_domain.shape.extents)
            log.debug('Spatial doa: %s', sdoa.slices)
            slice_.extend(sdoa.slices)

        log.debug('Getting slice: %s', slice_)

        return_value = self._range_value[param_name][slice_]
        return return_value
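
    # A short sketch of the tdoa argument described above (assuming 'cov' is a coverage
    # holding a 'temp' parameter with at least 10 timesteps; the names are illustrative only):
    #
    #   cov.set_parameter_values('temp', value=np.arange(10), tdoa=slice(0, 10))
    #   first_five = cov.get_parameter_values('temp', tdoa=slice(0, 5))
    #   everything = cov.get_parameter_values('temp')   # tdoa defaults to the full domain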

    def get_parameter_context(self, param_name):
        """
        Retrieve the ParameterContext object for the specified parameter

        @param param_name   The name of the parameter for which to retrieve context
        @returns A ParameterContext object
        @throws KeyError    The coverage does not contain a parameter with name 'param_name'
        """
        if param_name not in self._range_dictionary:
            raise KeyError('Parameter \'{0}\' not found in coverage'.format(param_name))

        return self._range_dictionary.get_context(param_name)

    @property
    def info(self):
        """
        Returns a detailed string representation of the coverage contents
        @returns    string of coverage contents
        """
        lst = []
        indent = ' '
        lst.append('ID: {0}'.format(self._id))
        lst.append('Name: {0}'.format(self.name))
        lst.append('Temporal Domain:\n{0}'.format(self.temporal_domain.__str__(indent*2)))
        lst.append('Spatial Domain:\n{0}'.format(self.spatial_domain.__str__(indent*2)))

        lst.append('Parameters:')
        for x in self._range_value:
            lst.append('{0}{1} {2}\n{3}'.format(indent*2,x,self._range_value[x].shape,self._range_dictionary.get_context(x).__str__(indent*4)))

        return '\n'.join(lst)

    def __str__(self):
        lst = []
        indent = ' '
        lst.append('ID: {0}'.format(self._id))
        lst.append('Name: {0}'.format(self.name))
        lst.append('TemporalDomain: Shape=>{0} Axes=>{1}'.format(self.temporal_domain.shape.extents, self.temporal_domain.crs.axes))
        lst.append('SpatialDomain: Shape=>{0} Axes=>{1}'.format(self.spatial_domain.shape.extents, self.spatial_domain.crs.axes))
        lst.append('Coordinate Parameters: {0}'.format(self.list_parameters(coords_only=True)))
        lst.append('Data Parameters: {0}'.format(self.list_parameters(coords_only=False, data_only=True)))

        return '\n'.join(lst)
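
The methods above make up the read/write surface of SimplexCoverage; the sketch below exercises the time-oriented convenience calls. It is only a sketch: 'cov' stands for an already-constructed (or loaded) coverage whose ParameterDictionary contains a temporal parameter plus a 'temp' data parameter, and those names are assumptions rather than anything mandated by the class.

import numpy as np

def exercise_coverage(cov):
    # Grow the temporal domain, then fill the newly added timesteps
    cov.insert_timesteps(10)
    origin = cov.num_timesteps - 10

    cov.set_time_values(np.arange(origin, origin + 10), tdoa=slice(origin, origin + 10))
    cov.set_parameter_values('temp', np.random.random(10), tdoa=slice(origin, origin + 10))

    # Inspect what the coverage knows about itself
    print cov.list_parameters(coords_only=True)
    print cov.get_parameter_context('temp').uom

    return cov.get_time_values(), cov.get_parameter_values('temp')
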
Example 29
class CTDL1PressureTransform(TransformFunction):
    ''' A basic transform that receives input through a subscription,
    parses the input from a CTD, extracts the pressure value and scales it according to
    the defined algorithm. If the transform has an output_stream it will publish the
    output on the output stream.

    '''

    # Make the stream definitions of the transform class attributes... best available option I can think of?
    incoming_stream_def = L0_pressure_stream_definition()
    outgoing_stream_def = L1_pressure_stream_definition()

    def __init__(self):

        ### Parameter dictionaries
        self.defining_parameter_dictionary()

    def defining_parameter_dictionary(self):

        # Define the parameter context objects

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0

        height_ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=np.float32))
        height_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        height_ctxt.uom = 'meters'
        height_ctxt.fill_value = 0e0

        pres_ctxt = ParameterContext('pres', param_type=QuantityType(value_encoding=np.float32))
        pres_ctxt.uom = 'dbar'
        pres_ctxt.fill_value = 0e0

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0

        # Define the parameter dictionary objects

        self.pres = ParameterDictionary()
        self.pres.add_context(t_ctxt)
        self.pres.add_context(lat_ctxt)
        self.pres.add_context(lon_ctxt)
        self.pres.add_context(height_ctxt)
        self.pres.add_context(pres_ctxt)
        self.pres.add_context(data_ctxt)


    def execute(self, granule):
        """Processes an incoming granule and builds the outgoing L1 pressure granule."""

        rdt = RecordDictionaryTool.load_from_granule(granule)
        #todo: use only flat dicts for now, may change later...
#        rdt0 = rdt['coordinates']
#        rdt1 = rdt['data']

        pressure = get_safe(rdt, 'pres')  # psd.get_values('pressure')

        longitude = get_safe(rdt, 'lon')  # psd.get_values('longitude')
        latitude = get_safe(rdt, 'lat')   # psd.get_values('latitude')
        time = get_safe(rdt, 'time')      # psd.get_values('time')
        height = get_safe(rdt, 'height')  # psd.get_values('height')

        log.warn('Got pressure: %s' % str(pressure))


        # L1
        # 1) The algorithm input is the L0 pressure data product (p_hex) and, in the case of the SBE 37IM, the pressure range (P_rng) from metadata.
        # 2) Convert the hexadecimal string to a decimal string
        # 3) For the SBE 37IM only, convert the pressure range (P_rng, input from metadata) from psia to dbar
        # 4) Perform scaling operation
        #    SBE 37IM
        #    L1 pressure data product (in dbar):


        # Use the constructor to put data into a granule
        #psc = PointSupplementConstructor(point_definition=self.outgoing_stream_def, stream_id=self.streams['output'])
        ### Assumes the config argument for output streams is known and there is only one 'output'.
        ### The stream id is part of the metadata which must go in each stream granule - this is awkward to do at the
        ### application level like this!

        scaled_pressure = pressure

        for i in xrange(len(pressure)):
            #todo: get pressure range from metadata (if present) and include in calc
            scaled_pressure[i] = pressure[i]

        root_rdt = RecordDictionaryTool(param_dictionary=self.pres)

        #todo: use only flat dicts for now, may change later...
#        data_rdt = RecordDictionaryTool(taxonomy=self.tx)
#        coord_rdt = RecordDictionaryTool(taxonomy=self.tx)

        root_rdt['pres'] = scaled_pressure
        root_rdt['time'] = time
        root_rdt['lat'] = latitude
        root_rdt['lon'] = longitude
        root_rdt['height'] = height

#        root_rdt['coordinates'] = coord_rdt
#        root_rdt['data'] = data_rdt

        return build_granule(data_producer_id='ctd_L1_pressure', param_dictionary=self.pres, record_dictionary=root_rdt)
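
The numbered comments above describe the L0-to-L1 conversion, but the loop leaves the values unchanged. The sketch below shows the generic shape of that conversion (hex string to decimal counts, pressure range from psia to dbar, then a linear scaling); the 16-bit count range and the final scaling expression are placeholders, not the vendor-specified SBE 37IM formula.

PSIA_TO_DBAR = 0.689476  # 1 psi = 6894.76 Pa = 0.689476 dbar

def scale_pressure(p_hex, p_rng_psia):
    """Illustrative L0 -> L1 pressure scaling (placeholder formula, not the vendor algorithm)."""
    p_counts = int(p_hex, 16)                        # 2) hexadecimal string -> decimal
    p_rng_dbar = p_rng_psia * PSIA_TO_DBAR           # 3) pressure range psia -> dbar
    return (float(p_counts) / 0xFFFF) * p_rng_dbar   # 4) placeholder scaling into the converted range

print scale_pressure('7FFF', 1000.0)   # ~344.7 dbar under these placeholder assumptions
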
Example 30
    def _create_parameter_dictionary(self):
        pdict = ParameterDictionary()

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('water_temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_bottom', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_middle', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        z_ctxt = ParameterContext('z', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        z_ctxt.uom = 'meters'
        pdict.add_context(z_ctxt)

        flow_ctxt = ParameterContext('streamflow', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        flow_ctxt.uom = 'unknown'
        pdict.add_context(flow_ctxt)

        spcond_ctxt = ParameterContext('specific_conductance', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        spcond_ctxt.uom = 'unknown'
        pdict.add_context(spcond_ctxt)

        qual_ctxt = ParameterContext('data_qualifier', param_type=QuantityType(value_encoding=numpy.dtype('bool')))
        qual_ctxt.uom = 'unknown'
        pdict.add_context(qual_ctxt)

        return pdict
Example 31
    def get_param_dict(self):
        pdict = ParameterDictionary()

        cond_ctxt = ParameterContext(
            'conductivity', param_type=QuantityType(value_encoding=np.float64))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext(
            'pressure', param_type=QuantityType(value_encoding=np.float64))
        pres_ctxt.uom = 'unknown'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext(
            'temperature', param_type=QuantityType(value_encoding=np.float64))
        temp_ctxt.uom = 'unknown'
        temp_ctxt.fill_value = 0x0
        pdict.add_context(temp_ctxt)

        oxy_ctxt = ParameterContext(
            'oxygen', param_type=QuantityType(value_encoding=np.float64))
        oxy_ctxt.uom = 'unknown'
        oxy_ctxt.fill_value = 0x0
        pdict.add_context(oxy_ctxt)

        internal_ts_ctxt = ParameterContext(
            name='internal_timestamp',
            param_type=QuantityType(value_encoding=np.float64))
        internal_ts_ctxt._derived_from_name = 'time'
        internal_ts_ctxt.uom = 'seconds'
        internal_ts_ctxt.fill_value = -1
        pdict.add_context(internal_ts_ctxt, is_temporal=True)

        driver_ts_ctxt = ParameterContext(
            name='driver_timestamp',
            param_type=QuantityType(value_encoding=np.float64))
        driver_ts_ctxt._derived_from_name = 'time'
        driver_ts_ctxt.uom = 'seconds'
        driver_ts_ctxt.fill_value = -1
        pdict.add_context(driver_ts_ctxt)

        return pdict
Example 32
    def sync_rdt_with_coverage(self,
                               coverage=None,
                               tdoa=None,
                               start_time=None,
                               end_time=None,
                               stride_time=None,
                               parameters=None):
        '''
        Builds a granule based on the coverage
        '''
        if coverage is None:
            coverage = self.coverage

        slice_ = slice(None)  # Defaults to all values
        if tdoa is not None and isinstance(tdoa, slice):
            slice_ = tdoa

        elif stride_time is not None:
            validate_is_instance(start_time, Number,
                                 'start_time must be a number for striding.')
            validate_is_instance(end_time, Number,
                                 'end_time must be a number for striding.')
            validate_is_instance(stride_time, Number,
                                 'stride_time must be a number for striding.')
            ugly_range = np.arange(start_time, end_time, stride_time)
            idx_values = [
                TimeUtils.get_relative_time(coverage, i) for i in ugly_range
            ]
            slice_ = [idx_values]

        elif not (start_time is None and end_time is None):
            time_var = coverage._temporal_param_name
            uom = coverage.get_parameter_context(time_var).uom
            if start_time is not None:
                start_units = TimeUtils.ts_to_units(uom, start_time)
                log.info('Units: %s', start_units)
                start_idx = TimeUtils.get_relative_time(coverage, start_units)
                log.info('Start Index: %s', start_idx)
                start_time = start_idx
            if end_time is not None:
                end_units = TimeUtils.ts_to_units(uom, end_time)
                log.info('End units: %s', end_units)
                end_idx = TimeUtils.get_relative_time(coverage, end_units)
                log.info('End index: %s', end_idx)
                end_time = end_idx
            slice_ = slice(start_time, end_time, stride_time)
            log.info('Slice: %s', slice_)

        if parameters is not None:
            pdict = ParameterDictionary()
            params = set(coverage.list_parameters()).intersection(parameters)
            for param in params:
                pdict.add_context(coverage.get_parameter_context(param))
            rdt = RecordDictionaryTool(param_dictionary=pdict)
            self.pdict = pdict
        else:
            rdt = RecordDictionaryTool(
                param_dictionary=coverage.parameter_dictionary)

        fields = coverage.list_parameters()
        if parameters is not None:
            fields = set(fields).intersection(parameters)

        for d in fields:
            rdt[d] = coverage.get_parameter_values(d, tdoa=slice_)
        self.rdt = rdt  # Sync
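
A minimal calling sketch for the method above; 'builder' stands for an instance of the containing class with a coverage already attached, and the epoch-second bounds assume the coverage's time axis is 'seconds since 1970-01-01' (all names and values here are illustrative).

def demo_sync(builder):
    builder.sync_rdt_with_coverage()                                    # full coverage
    builder.sync_rdt_with_coverage(tdoa=slice(0, 100))                  # explicit index slice
    builder.sync_rdt_with_coverage(start_time=1325376000.0,             # 2012-01-01T00:00Z
                                   end_time=1325462400.0,               # 2012-01-02T00:00Z
                                   parameters=['time', 'temp'])
    return builder.rdt                                                  # the synced granule RDT
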
Example 33
class DensityTransform(TransformFunction):
    ''' A basic transform that receives input through a subscription,
    parses the input from a CTD, extracts the conductivity, density and temperature values and calculates density
    according to the defined algorithm. If the transform has an output_stream it will
    publish the output on the output stream.

    '''

    # Make the stream definitions of the transform class attributes... best available option I can think of?
    incoming_stream_def = SBE37_CDM_stream_definition()
    outgoing_stream_def = L2_density_stream_definition()

    def __init__(self):

#        ### Taxonomies are defined before hand out of band... somehow.
#        tx = TaxyTool()
#        tx.add_taxonomy_set('density','long name for density')
#        tx.add_taxonomy_set('lat','long name for latitude')
#        tx.add_taxonomy_set('lon','long name for longitude')
#        tx.add_taxonomy_set('height','long name for height')
#        tx.add_taxonomy_set('time','long name for time')
#        # This is an example of using groups it is not a normative statement about how to use groups
#        tx.add_taxonomy_set('coordinates','This group contains coordinates...')
#        tx.add_taxonomy_set('data','This group contains data...')

        ### Parameter dictionaries
        self.defining_parameter_dictionary()

    def defining_parameter_dictionary(self):

        # Define the parameter context objects

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0

        height_ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=np.float32))
        height_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        height_ctxt.uom = 'meters'
        height_ctxt.fill_value = 0e0

        dens_ctxt = ParameterContext('dens', param_type=QuantityType(value_encoding=np.float32))
        dens_ctxt.uom = 'kg m-3'
        dens_ctxt.fill_value = 0e0

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0

        # Define the parameter dictionary objects

        self.dens = ParameterDictionary()
        self.dens.add_context(t_ctxt)
        self.dens.add_context(lat_ctxt)
        self.dens.add_context(lon_ctxt)
        self.dens.add_context(height_ctxt)
        self.dens.add_context(dens_ctxt)
        self.dens.add_context(data_ctxt)


    def execute(self, granule):
        """Processes an incoming granule and builds the outgoing density granule."""

        rdt = RecordDictionaryTool.load_from_granule(granule)
        #todo: use only flat dicts for now, may change later...
#        rdt0 = rdt['coordinates']
#        rdt1 = rdt['data']

        temperature = get_safe(rdt, 'temp')
        conductivity = get_safe(rdt, 'cond')
        density = get_safe(rdt, 'dens')

        longitude = get_safe(rdt, 'lon')
        latitude = get_safe(rdt, 'lat')
        time = get_safe(rdt, 'time')
        height = get_safe(rdt, 'height')


        log.warn('Got conductivity: %s' % str(conductivity))
        log.warn('Got density: %s' % str(density))
        log.warn('Got temperature: %s' % str(temperature))
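        # The three calls below follow the usual chain: conductivity ratio -> practical
        # salinity (SP) -> absolute salinity (SA) -> in-situ density.  Each of these
        # functions expects pressure (in dbar) as the 'p' argument; this example passes
        # the 'dens' field there, so read the sequence as an illustration of the call
        # order rather than a validated density computation.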


        sp = SP_from_cndr(r=conductivity/cte.C3515, t=temperature, p=density)

        sa = SA_from_SP(sp, density, longitude, latitude)

        density = rho(sa, temperature, density)

        log.warn('Got density: %s' % str(density))

        # Use the constructor to put data into a granule
        #psc = PointSupplementConstructor(point_definition=self.outgoing_stream_def, stream_id=self.streams['output'])
        ### Assumes the config argument for output streams is known and there is only one 'output'.
        ### the stream id is part of the metadata which much go in each stream granule - this is awkward to do at the
        ### application level like this!

        root_rdt = RecordDictionaryTool(param_dictionary=self.dens)
        #todo: use only flat dicts for now, may change later...
#        data_rdt = RecordDictionaryTool(taxonomy=self.tx)
#        coord_rdt = RecordDictionaryTool(taxonomy=self.tx)

        root_rdt['dens'] = density
        root_rdt['time'] = time
        root_rdt['lat'] = latitude
        root_rdt['lon'] = longitude
        root_rdt['height'] = height

#        root_rdt['coordinates'] = coord_rdt
#        root_rdt['data'] = data_rdt

        return build_granule(data_producer_id='ctd_L2_density', param_dictionary=self.dens, record_dictionary=root_rdt)
class RecordDictionaryToolTestCase(unittest.TestCase):

    def setUp(self):

        self._tx = TaxyTool()
        self._tx.add_taxonomy_set('temp', 'long_temp_name')
        self._tx.add_taxonomy_set('cond', 'long_cond_name')
        self._tx.add_taxonomy_set('pres', 'long_pres_name')
        self._tx.add_taxonomy_set('rdt')
        self._tx.add_taxonomy_set('rdt2')
        # map is {<local name>: <granule name or path>}

        self._rdt = RecordDictionaryTool(taxonomy=self._tx)

        self._pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        self._pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        self._pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        self._pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        self._pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        self._pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pres', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        self._pdict.add_context(pres_ctxt)

        self._rdt_pdict = RecordDictionaryTool(param_dictionary=self._pdict)

    def test_init(self):

        # initialize with a ParameterDictionary
        rdt = RecordDictionaryTool(param_dictionary=self._pdict)
        self.assertIsInstance(rdt._param_dict, ParameterDictionary)

        # initialize with nonsense
        self.assertRaises(TypeError, RecordDictionaryTool, ['foo', 'barr'])

        # initialize with a valid shape
        rdt = RecordDictionaryTool(param_dictionary=self._pdict, shape=(5,2))
        self.assertEquals(rdt._shp, (5,2))

        rdt = RecordDictionaryTool(param_dictionary=self._pdict, shape=(5,))
        self.assertEquals(rdt._shp, (5,))

        rdt = RecordDictionaryTool(param_dictionary=self._pdict, shape=5)
        self.assertEquals(rdt._shp, (5,))

        # initialize with no length
        rdt = RecordDictionaryTool(param_dictionary=self._pdict)
        self.assertEquals(rdt._shp, None)

    def test_init_with_taxonomy(self):

        # initialize with a taxonomy tool
        rdt = RecordDictionaryTool(taxonomy=self._tx)
        self.assertIsInstance(rdt._tx, TaxyTool)

        # initialize with a taxonomy object
        rdt = RecordDictionaryTool(taxonomy=self._tx._t)
        self.assertIsInstance(rdt._tx, TaxyTool)

        # initialize with pooo
        self.assertRaises(TypeError, RecordDictionaryTool, ['foo', 'barr'])

        # initialize with a valid shape
        rdt = RecordDictionaryTool(taxonomy=self._tx, shape=(5,2))
        self.assertEquals(rdt._shp, (5,2))

        rdt = RecordDictionaryTool(taxonomy=self._tx, shape=(5,))
        self.assertEquals(rdt._shp, (5,))

        rdt = RecordDictionaryTool(taxonomy=self._tx, shape=5)
        self.assertEquals(rdt._shp, (5,))

        # initialize with no length
        rdt = RecordDictionaryTool(taxonomy=self._tx)
        self.assertEquals(rdt._shp, None)

    def test_set_and_get(self):
        # make sure you can set and get items in the granule by name in the parameter dictionary

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)


        self.assertRaises(KeyError, self._rdt.__setitem__, 'long_temp_name', temp_array)
        self.assertRaises(KeyError, self._rdt.__setitem__, 'nonsense', temp_array)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        self.assertTrue(numpy.allclose(self._rdt_pdict['temp'], temp_array))
        self.assertTrue(numpy.allclose(self._rdt_pdict['conductivity'], cond_array))
        self.assertTrue(numpy.allclose(self._rdt_pdict['pres'], pres_array))

        #want to check to make sure a KeyError is raised when a non-nickname key is used, but it's not working correctly
        self.assertRaises(KeyError, self._rdt.__getitem__, 'long_temp_name')
        self.assertRaises(KeyError, self._rdt.__getitem__,'nonsense!')

        pdict =self._pdict
        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt['temp'] = temp_array

        # Now test bad values... list not numpy array...
        with self.assertRaises(TypeError) as te:
            rdt['temp'] = [1,2,3]

        self.assertEquals(
            te.exception.message,
            '''Invalid type "<type 'list'>" in Record Dictionary Tool setitem with name "temp". Valid types are numpy.ndarray and RecordDictionaryTool'''
            )

        # Now test numpy scalar array...
        with self.assertRaises(TypeError) as te:
            rdt['temp'] = numpy.float32(3.14159)

        self.assertEquals(
            te.exception.message,
            '''Invalid type "<type 'numpy.float32'>" in Record Dictionary Tool setitem with name "temp". Valid types are numpy.ndarray and RecordDictionaryTool'''
        )


        # Now test rank zero array...
        with self.assertRaises(ValueError) as te:
            rdt['temp'] = numpy.array(22.5)

        self.assertEquals(
            te.exception.message,
            '''The rank of a value sequence array in a record dictionary must be greater than zero. Got name "temp" with rank "0"'''
        )

        #TODO: Fix record_dictionary shape validation, then put this test back in
#        # Test set invalid shape
#        pres_array = numpy.random.standard_normal(90)
#        with self.assertRaises(ValueError) as te:
#            rdt['pres'] = pres_array
#
#        self.assertEquals(
#            te.exception.message,
#            '''Invalid array shape "(90,)" for name "pres"; Record dictionary defined shape is "(100,)"'''
#        )


        # make a new RDT for testing higher rank objects...
        rdt = RecordDictionaryTool(param_dictionary=self._pdict)

        # Now test rank 2 array...
        rdt['temp'] = numpy.array([[22.5,],])
        self.assertTrue((rdt['temp'] == numpy.array([[22.5,],])).all())

        # Now test rank 2 array...
        rdt['conductivity'] = numpy.array([[28.5,],])
        self.assertTrue((rdt['conductivity'] == numpy.array([[28.5,],])).all())

    def test_set_and_get_with_taxonomy(self):
        #make sure you can set and get items in the granule by name in the taxonomy

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)


        self.assertRaises(KeyError, self._rdt.__setitem__, 'long_temp_name',temp_array)
        self.assertRaises(KeyError, self._rdt.__setitem__, 'nonsense',temp_array)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array

        self.assertTrue(numpy.allclose(self._rdt['temp'], temp_array))
        self.assertTrue(numpy.allclose(self._rdt['cond'], cond_array))
        self.assertTrue(numpy.allclose(self._rdt['pres'], pres_array))

        #want to check to make sure a KeyError is raised when a non-nickname key is used, but it's not working correctly
        self.assertRaises(KeyError, self._rdt.__getitem__, 'long_temp_name')
        self.assertRaises(KeyError, self._rdt.__getitem__,'nonsense!')

        taxy_tool_obj =self._tx
        rdt = RecordDictionaryTool(taxonomy=taxy_tool_obj)
        rdt['temp'] = temp_array
        self._rdt['rdt'] = rdt

        # Now test when the Record Dictionary Tool is created with the Taxonomy object rather than the TaxyTool
        # This can fail if the == method for TaxyTool is implemented incorrectly

        taxonomy_ion_obj = self._tx._t
        rdt2 = RecordDictionaryTool(taxonomy=taxonomy_ion_obj)
        rdt2['temp'] = temp_array
        self._rdt['rdt2'] = rdt2


        # Now test bad values... list not numpy array...
        with self.assertRaises(TypeError) as te:
            rdt2['temp'] = [1,2,3]

        self.assertEquals(
            te.exception.message,
            '''Invalid type "<type 'list'>" in Record Dictionary Tool setitem with name "temp". Valid types are numpy.ndarray and RecordDictionaryTool'''
        )

        # Now test numpy scalar array...
        with self.assertRaises(TypeError) as te:
            rdt2['temp'] = numpy.float32(3.14159)

        self.assertEquals(
            te.exception.message,
            '''Invalid type "<type 'numpy.float32'>" in Record Dictionary Tool setitem with name "temp". Valid types are numpy.ndarray and RecordDictionaryTool'''
        )


        # Now test rank zero array...
        with self.assertRaises(ValueError) as te:
            rdt2['temp'] = numpy.array(22.5)

        self.assertEquals(
            te.exception.message,
            '''The rank of a value sequence array in a record dictionary must be greater than zero. Got name "temp" with rank "0"'''
        )

        #TODO: Fix record_dictionary shape validation, then put this test back in
#        # Test set invalid shape
#        pres_array = numpy.random.standard_normal(90)
#        with self.assertRaises(ValueError) as te:
#            rdt2['pres'] = pres_array
#
#        self.assertEquals(
#            te.exception.message,
#            '''Invalid array shape "(90,)" for name "pres"; Record dictionary defined shape is "(100,)"'''
#        )


        # make a new RDT for testing higher rank objects...
        taxy_tool_obj =self._tx
        rdt = RecordDictionaryTool(taxonomy=taxy_tool_obj)

        # Now test rank 2 array...
        rdt['temp'] = numpy.array([[22.5,],])
        self.assertTrue((rdt['temp'] == numpy.array([[22.5,],])).all())

        # Now test rank 2 array...
        rdt['cond'] = numpy.array([[28.5,],])
        self.assertTrue((rdt['cond'] == numpy.array([[28.5,],])).all())


    def test_iteration(self):
        #Test all four iteration methods for items in the granule

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        for k, v in self._rdt_pdict.iteritems():
            if k == 'temp':
                self.assertTrue(numpy.allclose(temp_array, v))
            elif k == 'conductivity':
                self.assertTrue(numpy.allclose(cond_array, v))
            elif k == 'pres':
                self.assertTrue(numpy.allclose(pres_array, v))
            else:
                self.assertTrue(False)

        for k in self._rdt_pdict.iterkeys():
            self.assertTrue(k == 'temp' or k == 'conductivity' or k == 'pres')

        for v in self._rdt_pdict.itervalues():
            self.assertTrue(numpy.allclose(temp_array, v) or numpy.allclose(cond_array, v) or numpy.allclose(pres_array, v))

        for k in self._rdt_pdict:
            self.assertTrue(k == 'temp' or k == 'conductivity' or k == 'pres')

    def test_iteration_with_taxonomy(self):
        #Test all four iteration methods for items in the granule

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array

        for k, v in self._rdt.iteritems():
            if k == 'temp':
                self.assertTrue(numpy.allclose(temp_array, v))
            elif k == 'cond':
                self.assertTrue(numpy.allclose(cond_array, v))
            elif k == 'pres':
                self.assertTrue(numpy.allclose(pres_array, v))
            else:
                self.assertTrue(False)

        for k in self._rdt.iterkeys():
            self.assertTrue(k == 'temp' or k == 'cond' or k == 'pres')

        for v in self._rdt.itervalues():
            self.assertTrue(numpy.allclose(temp_array, v) or numpy.allclose(cond_array, v) or numpy.allclose(pres_array, v))

        for k in self._rdt:
            self.assertTrue(k == 'temp' or k == 'cond' or k == 'pres')

    def test_update(self):
        # Update this granule with the content of another. Assert that the parameter dictionaries are the same...



        pres_array = numpy.random.standard_normal(100)
        self._rdt_pdict['pres'] = pres_array

        rdt2 = RecordDictionaryTool(param_dictionary=self._pdict)
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)

        rdt2['temp'] = temp_array
        rdt2['conductivity'] = cond_array

        self._rdt_pdict.update(rdt2)

        self.assertIn('pres', self._rdt_pdict)
        self.assertIn('temp', self._rdt_pdict)
        self.assertIn('conductivity', self._rdt_pdict)

        self.assertTrue((self._rdt_pdict['pres'] == pres_array).all())
        self.assertTrue((self._rdt_pdict['conductivity'] == cond_array).all())
        self.assertTrue((self._rdt_pdict['temp'] == temp_array).all())

        self.assertEquals(len(self._rdt_pdict), 3)

    def test_update_with_taxonomy(self):
        # Update this granule with the content of another. Assert that the taxonomies are the same...



        pres_array = numpy.random.standard_normal(100)
        self._rdt['pres'] = pres_array

        rdt2 = RecordDictionaryTool(taxonomy=self._tx)
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)

        rdt2['temp'] = temp_array
        rdt2['cond'] = cond_array

        self._rdt.update(rdt2)

        self.assertIn('pres', self._rdt)
        self.assertIn('temp', self._rdt)
        self.assertIn('cond', self._rdt)

        self.assertTrue((self._rdt['pres'] == pres_array).all())
        self.assertTrue((self._rdt['cond'] == cond_array).all())
        self.assertTrue((self._rdt['temp'] == temp_array).all())

        self.assertEquals(len(self._rdt), 3)

    def test_len(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        self.assertEquals(len(self._rdt_pdict), 3)

    def test_len_with_taxonomy(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array

        self.assertEquals(len(self._rdt), 3)

    def test_repr(self):
        # Come up with a reasonable string representation of the granule for debug purposes only

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array
        self.assertTrue(len(repr(self._rdt_pdict)) > 0)

    def test_repr_with_taxonomy(self):
        # Come up with a reasonable string representation of the granule for debug purposes only

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array
        self.assertTrue(len(repr(self._rdt)) > 0)

    def test_delete(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        self.assertIn('pres', self._rdt_pdict)
        self.assertIn('temp', self._rdt_pdict)
        self.assertIn('conductivity', self._rdt_pdict)

        del self._rdt_pdict['pres']

        self.assertNotIn('pres', self._rdt_pdict)
        self.assertIn('temp', self._rdt_pdict)
        self.assertIn('conductivity', self._rdt_pdict)

    def test_delete_with_taxonomy(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array

        self.assertIn('pres', self._rdt)
        self.assertIn('temp', self._rdt)
        self.assertIn('cond', self._rdt)

        del self._rdt['pres']

        self.assertNotIn('pres', self._rdt)
        self.assertIn('temp', self._rdt)
        self.assertIn('cond', self._rdt)

    def test_contains(self):

        # foobar isn't even in the taxonomy!
        self.assertNotIn('foobar', self._rdt_pdict)

        # Temp is in the ParameterDictionary but not the record dictionary
        self.assertNotIn('temp', self._rdt_pdict)


        # Now put in some data and make sure it works...
        temp_array = numpy.random.standard_normal(100)
        self._rdt_pdict['temp'] = temp_array

        self.assertIn('temp', self._rdt_pdict)

    def test_contains_with_taxonomy(self):

        # foobar isn't even in the taxonomy!
        self.assertNotIn('foobar', self._rdt)

        # Temp is in the taxonomy but not the record dictionary
        self.assertNotIn('temp', self._rdt)


        # Now put in some data and make sure it works...
        temp_array = numpy.random.standard_normal(100)
        self._rdt['temp'] = temp_array

        self.assertIn('temp', self._rdt)

    @unittest.skip('Pretty print not implemented at this time')
    def test_pretty_print(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        self.assertGreater(len(self._rdt_pdict.pretty_print()), 0)

    @unittest.skip('Pretty print not implemented at this time')
    def test_pretty_print_with_taxonomy(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array


        rdt = RecordDictionaryTool(taxonomy=self._tx)
        rdt['rdt'] = temp_array
        self._rdt['rdt'] = rdt

        self.assertGreater(len(self._rdt.pretty_print()), 0)
Example 35
    def get_param_dict(self):
        pdict = ParameterDictionary()

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=np.float64))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pressure', param_type=QuantityType(value_encoding=np.float64))
        pres_ctxt.uom = 'unknown'
        pres_ctxt.fill_value = 0x0
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext('temperature', param_type=QuantityType(value_encoding=np.float64))
        temp_ctxt.uom = 'unknown'
        temp_ctxt.fill_value = 0x0
        pdict.add_context(temp_ctxt)

        oxy_ctxt = ParameterContext('oxygen', param_type=QuantityType(value_encoding=np.float64))
        oxy_ctxt.uom = 'unknown'
        oxy_ctxt.fill_value = 0x0
        pdict.add_context(oxy_ctxt)

        internal_ts_ctxt = ParameterContext(name='internal_timestamp', param_type=QuantityType(value_encoding=np.float64))
        internal_ts_ctxt._derived_from_name = 'time'
        internal_ts_ctxt.uom = 'seconds'
        internal_ts_ctxt.fill_value = -1
        pdict.add_context(internal_ts_ctxt, is_temporal=True)

        driver_ts_ctxt = ParameterContext(name='driver_timestamp', param_type=QuantityType(value_encoding=np.float64))
        driver_ts_ctxt._derived_from_name = 'time'
        driver_ts_ctxt.uom = 'seconds'
        driver_ts_ctxt.fill_value = -1
        pdict.add_context(driver_ts_ctxt)

        return pdict
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dms_cli = DatasetManagementServiceClient()
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent(name='example data agent',
                                   handler_module=self.DVR_CONFIG['dvr_mod'],
                                   handler_class=self.DVR_CONFIG['dvr_cls'])
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance(
            name='example dataset agent instance')
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(
            eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(name='example data provider',
                                     institution=Institution(),
                                     contact=ContactInformation())
        dprov.contact.individual_names_given = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(name='example datasource',
                          protocol_type='FILE',
                          institution=Institution(),
                          contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.individual_names_given = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'ruv_test_dataset'
        dset = ExternalDataset(name=ds_name,
                               dataset_description=DatasetDescription(),
                               update_description=UpdateDescription(),
                               contact=ContactInformation())

        dset.dataset_description.parameters['base_url'] = 'test_data/ruv/'
        dset.dataset_description.parameters[
            'list_pattern'] = 'RDLi_SEAB_2011_08_24_1600.ruv'
        dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H %M'
        dset.dataset_description.parameters[
            'date_extraction_pattern'] = 'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
        dset.dataset_description.parameters['temporal_dimension'] = None
        dset.dataset_description.parameters['zonal_dimension'] = None
        dset.dataset_description.parameters['meridional_dimension'] = None
        dset.dataset_description.parameters['vertical_dimension'] = None
        dset.dataset_description.parameters['variables'] = []
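        # How the two patterns above are intended to combine (an illustrative sketch,
        # not the handler's actual code): the groups captured by
        # 'date_extraction_pattern' are joined and parsed with 'date_pattern', e.g.
        #   m = re.search(r'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv',
        #                 'RDLi_SEAB_2011_08_24_1600.ruv')
        #   datetime.strptime(' '.join(m.groups()), '%Y %m %d %H %M')   # -> 2011-08-24 16:00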

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='ruv_model')
        #dsrc_model.model = 'RUV'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(
            external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(
            external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(
            data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(
            data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(
            external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(
            external_dataset_id=ds_id, agent_instance_id=eda_inst_id)

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext(
            'data',
            param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        #create temp streamdef so the data product can create the stream
        pc_list = []
        for pc_k, pc in pdict.iteritems():
            pc_list.append(dms_cli.create_parameter_context(
                pc_k, pc[1].dump()))

        pdict_id = dms_cli.create_parameter_dictionary('ruv_param_dict',
                                                       pc_list)

        streamdef_id = pubsub_cli.create_stream_definition(
            name="ruv",
            description="stream def for ruv testing",
            parameter_dictionary_id=pdict_id)

        dprod = IonObject(RT.DataProduct,
                          name='ruv_parsed_product',
                          description='parsed ruv product')

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(
            data_product=dprod, stream_definition_id=streamdef_id)

        dams_cli.assign_data_product(input_resource_id=ds_id,
                                     data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id,
                                              predicate=PRED.hasStream,
                                              object_type=RT.Stream,
                                              id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({
            'ExternalDataset': ds_id,
            'ExternalDataProvider': ext_dprov_id,
            'DataSource': ext_dsrc_id,
            'DataSourceModel': ext_dsrc_model_id,
            'DataProducer': dproducer_id,
            'DataProduct': dproduct_id,
            'Stream': stream_id
        }))

        #CBM: Eventually, probably want to group this crap somehow - not sure how yet...

        # Create the logger for receiving publications
        _, stream_route, _ = self.create_stream_and_logger(name='ruv',
                                                           stream_id=stream_id)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            'stream_route': stream_route,
            'external_dataset_res': dset,
            'param_dictionary': pdict.dump(),
            # CBM: Should this be put in the main body of the config - with mod & cls?
            'data_producer_id': dproducer_id,
            'max_records': 20,
        }
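
The RUV configuration above carries both a strptime-style 'date_pattern' and a capture-group regex 'date_extraction_pattern'. Below is a minimal sketch of how a handler could combine the two to recover a timestamp from the 'list_pattern' file name; this pairing is an assumption about the handler's behaviour, not code from the test.

# Hedged sketch: parse the timestamp embedded in an RUV file name using the two
# parameters configured above. The real RUV data handler may do this differently.
import re
from datetime import datetime

date_extraction_pattern = r'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
date_pattern = '%Y %m %d %H %M'

match = re.match(date_extraction_pattern, 'RDLi_SEAB_2011_08_24_1600.ruv')
if match:
    # Join the captured groups with spaces so they line up with date_pattern
    timestamp = datetime.strptime(' '.join(match.groups()), date_pattern)
    # timestamp == datetime(2011, 8, 24, 16, 0)
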
    def _setup_resources(self):
        # TODO: some or all of this (or some variation) should move to DAMS

        # Build the test resources for the dataset
        dams_cli = DataAcquisitionManagementServiceClient()
        dpms_cli = DataProductManagementServiceClient()
        rr_cli = ResourceRegistryServiceClient()
        pubsub_cli = PubsubManagementServiceClient()

        eda = ExternalDatasetAgent()
        eda_id = dams_cli.create_external_dataset_agent(eda)

        eda_inst = ExternalDatasetAgentInstance()
        eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
        dprov.contact.name = 'Christopher Mueller'
        dprov.contact.email = '*****@*****.**'

        # Create DataSource
        dsrc = DataSource(protocol_type='DAP', institution=Institution(), contact=ContactInformation())
        dsrc.connection_params['base_data_url'] = ''
        dsrc.contact.name = 'Tim Giguere'
        dsrc.contact.email = '*****@*****.**'

        # Create ExternalDataset
        ds_name = 'usgs_test_dataset'
        dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())

        # The usgs.nc test dataset is a download of the R1 dataset found here:
        # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
        dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
        dset.dataset_description.parameters['temporal_dimension'] = 'time'
        dset.dataset_description.parameters['zonal_dimension'] = 'lon'
        dset.dataset_description.parameters['meridional_dimension'] = 'lat'
        dset.dataset_description.parameters['vertical_dimension'] = 'z'
        dset.dataset_description.parameters['variables'] = [
            'water_temperature',
            'streamflow',
            'water_temperature_bottom',
            'water_temperature_middle',
            'specific_conductance',
            'data_qualifier',
            ]

        # Create DataSourceModel
        dsrc_model = DataSourceModel(name='dap_model')
        dsrc_model.model = 'DAP'
        dsrc_model.data_handler_module = 'N/A'
        dsrc_model.data_handler_class = 'N/A'

        ## Run everything through DAMS
        ds_id = dams_cli.create_external_dataset(external_dataset=dset)
        ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
        ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
        ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

        # Register the ExternalDataset
        dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

        # Or using each method
        dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
        dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
        dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
        dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
        #        dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

        #create temp streamdef so the data product can create the stream
        streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")

        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dprod = IonObject(RT.DataProduct,
                          name='usgs_parsed_product',
                          description='parsed usgs product',
                          temporal_domain=tdom,
                          spatial_domain=sdom)

        # Generate the data product and associate it to the ExternalDataset
        dproduct_id = dpms_cli.create_data_product(data_product=dprod,
                                                    stream_definition_id=streamdef_id,
                                                    parameter_dictionary=parameter_dictionary)

        dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)

        stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
        stream_id = stream_id[0]

        log.info('Created resources: {0}'.format({
            'ExternalDataset': ds_id,
            'ExternalDataProvider': ext_dprov_id,
            'DataSource': ext_dsrc_id,
            'DataSourceModel': ext_dsrc_model_id,
            'DataProducer': dproducer_id,
            'DataProduct': dproduct_id,
            'Stream': stream_id
        }))

        #CBM: Use CF standard_names

#        ttool = TaxyTool()
#        ttool.add_taxonomy_set('time','time')
#        ttool.add_taxonomy_set('lon','longitude')
#        ttool.add_taxonomy_set('lat','latitude')
#        ttool.add_taxonomy_set('z','water depth')
#        ttool.add_taxonomy_set('water_temperature', 'average water temperature')
#        ttool.add_taxonomy_set('water_temperature_bottom','water temperature at bottom of water column')
#        ttool.add_taxonomy_set('water_temperature_middle', 'water temperature at middle of water column')
#        ttool.add_taxonomy_set('streamflow', 'flow velocity of stream')
#        ttool.add_taxonomy_set('specific_conductance', 'specific conductance of water')
#        ttool.add_taxonomy_set('data_qualifier','data qualifier flag')
#
#        ttool.add_taxonomy_set('coords','This group contains coordinate parameters')
#        ttool.add_taxonomy_set('data','This group contains data parameters')

        # Create the logger for receiving publications
        self.create_stream_and_logger(name='usgs', stream_id=stream_id)

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('water_temperature', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_bottom', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('water_temperature_middle', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        temp_ctxt = ParameterContext('z', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'meters'
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('streamflow', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('specific_conductance', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        pres_ctxt = ParameterContext('data_qualifier', param_type=QuantityType(value_encoding=numpy.dtype('bool')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        self.EDA_RESOURCE_ID = ds_id
        self.EDA_NAME = ds_name
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id': stream_id,
            #'taxonomy': ttool.dump(),
            'param_dictionary': pdict.dump(),
            # CBM: Should this be put in the main body of the config - with mod & cls?
            'data_producer_id': dproducer_id,
            'max_records': 4,
        }
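
The USGS setup above declares many near-identical float32 QuantityType contexts by hand. A hedged sketch of how that repetition could be collapsed into a small helper, using the same ParameterDictionary/ParameterContext/QuantityType calls the snippets in this collection already rely on (the import paths shown are assumptions; the snippets above omit their imports):

# Assumed import paths for the classes used throughout these examples
from coverage_model.parameter import ParameterContext, ParameterDictionary
from coverage_model.parameter_types import QuantityType
import numpy

def build_quantity_pdict(specs):
    """Build a ParameterDictionary from (name, dtype string, unit) triples."""
    pd = ParameterDictionary()
    for name, dtype, uom in specs:
        ctxt = ParameterContext(name, param_type=QuantityType(value_encoding=numpy.dtype(dtype)))
        ctxt.uom = uom
        pd.add_context(ctxt)
    return pd

usgs_pdict = build_quantity_pdict([
    ('water_temperature', 'float32', 'degree_Celsius'),
    ('streamflow', 'float32', 'unknown'),
    ('specific_conductance', 'float32', 'unknown'),
])
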
Ejemplo n.º 38
0
class CTDL1ConductivityTransform(TransformFunction):
    ''' A basic transform that receives input through a subscription,
    parses the input from a CTD, extracts the conductivity value, and scales it
    according to the defined algorithm. If the transform has an output_stream,
    it publishes the output on that stream.
    '''

    # Make the stream definitions of the transform class attributes... best available option I can think of?
    incoming_stream_def = L0_conductivity_stream_definition()
    #outgoing_stream_def = L1_conductivity_stream_definition()

    def __init__(self):

        ### Parameter dictionaries
        self.defining_parameter_dictionary()


    def defining_parameter_dictionary(self):

        # Define the parameter context objects

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0

        height_ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=np.float32))
        height_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        height_ctxt.uom = 'meters'
        height_ctxt.fill_value = 0e0

        cond_ctxt = ParameterContext('cond', param_type=QuantityType(value_encoding=np.float32))
        cond_ctxt.uom = 'unknown'
        cond_ctxt.fill_value = 0e0

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0

        # Define the parameter dictionary objects

        self.cond = ParameterDictionary()
        self.cond.add_context(t_ctxt)
        self.cond.add_context(lat_ctxt)
        self.cond.add_context(lon_ctxt)
        self.cond.add_context(height_ctxt)
        self.cond.add_context(cond_ctxt)
        self.cond.add_context(data_ctxt)

    def execute(self, granule):
        """Processes incoming data!!!!
        """
        rdt = RecordDictionaryTool.load_from_granule(granule)
        #todo: use only flat dicts for now, may change later...
#        rdt0 = rdt['coordinates']
#        rdt1 = rdt['data']

        conductivity = get_safe(rdt, 'cond') #psd.get_values('conductivity')

        longitude = get_safe(rdt, 'lon') # psd.get_values('longitude')
        latitude = get_safe(rdt, 'lat')  #psd.get_values('latitude')
        time = get_safe(rdt, 'time') # psd.get_values('time')
        height = get_safe(rdt, 'height') # psd.get_values('height')

        log.warn('CTDL1ConductivityTransform: Got conductivity: %s' % str(conductivity))

        root_rdt = RecordDictionaryTool(param_dictionary=self.cond)

        #todo: use only flat dicts for now, may change later...
#        data_rdt = RecordDictionaryTool(taxonomy=self.tx)
#        coord_rdt = RecordDictionaryTool(taxonomy=self.tx)

        # Work on a copy so the incoming granule's values are not mutated in place
        scaled_conductivity = np.array(conductivity, dtype=np.float32)

        for i in xrange(len(scaled_conductivity)):
            scaled_conductivity[i] = (scaled_conductivity[i] / 100000.0) - 0.5

        root_rdt['cond'] = scaled_conductivity
        root_rdt['time'] = time
        root_rdt['lat'] = latitude
        root_rdt['lon'] = longitude
        root_rdt['height'] = height

#        root_rdt['coordinates'] = coord_rdt
#        root_rdt['data'] = data_rdt

        return build_granule(data_producer_id='ctd_L1_conductivity', param_dictionary=self.cond, record_dictionary=root_rdt)
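
When the conductivity values arrive as a numpy array (as they generally do in these examples), the per-element scaling loop in execute() can be expressed as a single vectorized statement. A hedged alternative, not part of the original transform:

import numpy as np

def scale_conductivity(raw_counts):
    # Same algorithm as the loop above, (counts / 100000.0) - 0.5, applied to
    # the whole array at once without mutating the caller's data.
    return (np.asarray(raw_counts, dtype=np.float64) / 100000.0) - 0.5
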
Ejemplo n.º 39
0
    def test_build_granule_and_load_from_granule(self):
        pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.dtype('int64')))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.dtype('float32')))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.dtype('float32')))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=np.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pres', param_type=QuantityType(value_encoding=np.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        pdict.add_context(pres_ctxt)

        rdt = RecordDictionaryTool(param_dictionary=pdict)

        #Create some arrays and fill them with random values
        temp_array = np.random.standard_normal(100)
        cond_array = np.random.standard_normal(100)
        pres_array = np.random.standard_normal(100)
        time_array = np.random.standard_normal(100)
        lat_array = np.random.standard_normal(100)
        lon_array = np.random.standard_normal(100)

        #Use the RecordDictionaryTool to add the values. This also would work if you used long_temp_name, etc.
        rdt['temp'] = temp_array
        rdt['conductivity'] = cond_array
        rdt['pres'] = pres_array
        rdt['time'] = time_array
        rdt['lat'] = lat_array
        rdt['lon'] = lon_array

        g = build_granule(data_producer_id='john', record_dictionary=rdt, param_dictionary=pdict)

        l_pd = ParameterDictionary.load(g.param_dictionary)

        #l_tx = TaxyTool.load_from_granule(g)

        l_rd = RecordDictionaryTool.load_from_granule(g)

        # Make sure we got back the same ParameterDictionary
        #self.assertEquals(l_pd, pdict)
        self.assertEquals(l_pd.ord_from_key('temp'), pdict.ord_from_key('temp'))
        self.assertEquals(l_pd.ord_from_key('conductivity'), pdict.ord_from_key('conductivity'))


        # Now test the record dictionary object
        self.assertEquals(l_rd._rd, rdt._rd)
        #self.assertEquals(l_rd._param_dict, rdt._param_dict)


        for k, v in l_rd.iteritems():
            self.assertIn(k, rdt)

            if isinstance(v, np.ndarray):
                self.assertTrue( (v == rdt[k]).all())

            else:
                self.assertEquals(v._rd, rdt[k]._rd)
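
The comparison loop at the end of this test can be factored into a reusable assertion. A hedged sketch that works on any two mappings of parameter name to numpy array (plain dicts are assumed here; RecordDictionaryTool supports the same kind of iteration in these examples):

import numpy as np

def assert_same_arrays(expected, actual):
    # Every expected parameter must be present and hold an identical array
    for key, value in expected.items():
        assert key in actual, 'missing parameter: %s' % key
        np.testing.assert_array_equal(np.asarray(actual[key]), np.asarray(value))
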
Ejemplo n.º 40
0
class SalinityTransform(TransformFunction):
    '''
    L2 Transform for CTD Data.
    Input is conductivity, temperature, and pressure delivered as a single packet.
    Output is Practical Salinity as calculated by the Gibbs Seawater package.
    '''

    outgoing_stream_def = L2_practical_salinity_stream_definition()
    incoming_stream_def = SBE37_CDM_stream_definition()

    def __init__(self):


#        ### Taxonomies are defined before hand out of band... somehow.
#        tx = TaxyTool()
#        tx.add_taxonomy_set('salinity','long name for salinity')
#        tx.add_taxonomy_set('lat','long name for latitude')
#        tx.add_taxonomy_set('lon','long name for longitude')
#        tx.add_taxonomy_set('height','long name for height')
#        tx.add_taxonomy_set('time','long name for time')
#        # This is an example of using groups it is not a normative statement about how to use groups
#        tx.add_taxonomy_set('coordinates','This group contains coordinates...')
#        tx.add_taxonomy_set('data','This group contains data...')

        ### Parameter dictionaries
        self.defining_parameter_dictionary()

    def defining_parameter_dictionary(self):

        # Define the parameter context objects

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 1970-01-01'
        t_ctxt.fill_value = 0x0

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt.fill_value = 0e0

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt.fill_value = 0e0

        height_ctxt = ParameterContext('height', param_type=QuantityType(value_encoding=np.float32))
        height_ctxt.reference_frame = AxisTypeEnum.HEIGHT
        height_ctxt.uom = 'meters'
        height_ctxt.fill_value = 0e0

        # Output parameter: execute() writes root_rdt['salinity'], so the context is named accordingly
        sal_ctxt = ParameterContext('salinity', param_type=QuantityType(value_encoding=np.float32))
        sal_ctxt.uom = 'unknown'
        sal_ctxt.fill_value = 0e0

        data_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=np.int8))
        data_ctxt.uom = 'byte'
        data_ctxt.fill_value = 0x0

        # Define the parameter dictionary objects

        self.sal = ParameterDictionary()
        self.sal.add_context(t_ctxt)
        self.sal.add_context(lat_ctxt)
        self.sal.add_context(lon_ctxt)
        self.sal.add_context(height_ctxt)
        self.sal.add_context(sal_ctxt)
        self.sal.add_context(data_ctxt)

    def execute(self, granule):
        """Processes incoming data!!!!
        """

        rdt = RecordDictionaryTool.load_from_granule(granule)
        #todo: use only flat dicts for now, may change later...
#        rdt0 = rdt['coordinates']
#        rdt1 = rdt['data']

        temperature = get_safe(rdt, 'temp')
        conductivity = get_safe(rdt, 'cond')
        pressure = get_safe(rdt, 'pres')

        longitude = get_safe(rdt, 'lon')
        latitude = get_safe(rdt, 'lat')
        time = get_safe(rdt, 'time')
        height = get_safe(rdt, 'height')

        log.warn('Got conductivity: %s' % str(conductivity))
        log.warn('Got pressure: %s' % str(pressure))
        log.warn('Got temperature: %s' % str(temperature))

        salinity = SP_from_cndr(r=conductivity/cte.C3515, t=temperature, p=pressure)

        log.warn('Got salinity: %s' % str(salinity))


        root_rdt = RecordDictionaryTool(param_dictionary=self.sal)
        #todo: use only flat dicts for now, may change later...
#        data_rdt = RecordDictionaryTool(taxonomy=self.tx)
#        coord_rdt = RecordDictionaryTool(taxonomy=self.tx)

        root_rdt['salinity'] = salinity
        root_rdt['time'] = time
        root_rdt['lat'] = latitude
        root_rdt['lon'] = longitude
        root_rdt['height'] = height

#        root_rdt['coordinates'] = coord_rdt
#        root_rdt['data'] = data_rdt

        return build_granule(data_producer_id='ctd_L2_salinity', param_dictionary=self.sal, record_dictionary=root_rdt)
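
The salinity step in execute() can be exercised on its own. A hedged sketch using the same SP_from_cndr and cte helpers the transform relies on; the import path below is an assumption and may differ in this codebase:

import numpy as np
from seawater.gibbs import SP_from_cndr, cte  # assumed import path

def practical_salinity(conductivity, temperature, pressure):
    # Conductivity ratio relative to C(35, 15, 0), then PSS-78 practical salinity
    return SP_from_cndr(r=np.asarray(conductivity) / cte.C3515,
                        t=np.asarray(temperature),
                        p=np.asarray(pressure))
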
Ejemplo n.º 41
0
    def _create_input_param_dict_for_test(self, parameter_dict_name=''):

        pdict = ParameterDictionary()

        t_ctxt = ParameterContext(
            'time',
            param_type=QuantityType(value_encoding=numpy.dtype('float64')))
        t_ctxt.axis = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1900'
        pdict.add_context(t_ctxt)

        cond_ctxt = ParameterContext(
            'conductivity',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = ''
        pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext(
            'pressure',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = ''
        pdict.add_context(pres_ctxt)

        temp_ctxt = ParameterContext(
            'temperature',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = ''
        pdict.add_context(temp_ctxt)

        dens_ctxt = ParameterContext(
            'density',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        dens_ctxt.uom = ''
        pdict.add_context(dens_ctxt)

        sal_ctxt = ParameterContext(
            'salinity',
            param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        sal_ctxt.uom = ''
        pdict.add_context(sal_ctxt)

        #create temp streamdef so the data product can create the stream
        pc_list = []
        for pc_k, pc in pdict.iteritems():
            ctxt_id = self.dataset_management.create_parameter_context(
                pc_k, pc[1].dump())
            pc_list.append(ctxt_id)
            self.addCleanup(self.dataset_management.delete_parameter_context,
                            ctxt_id)

        pdict_id = self.dataset_management.create_parameter_dictionary(
            parameter_dict_name, pc_list)
        self.addCleanup(self.dataset_management.delete_parameter_dictionary,
                        pdict_id)

        return pdict_id
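
The id returned by _create_input_param_dict_for_test is meant to be passed as parameter_dictionary_id when a stream definition is created, mirroring the create_stream_definition call shown earlier in this collection. A hedged usage sketch; the self.pubsub_management attribute and the names used are assumptions about the surrounding test class:

# Inside a test method of the same class (attribute and resource names are assumptions)
pdict_id = self._create_input_param_dict_for_test(parameter_dict_name='ctd_parsed_param_dict')
stream_def_id = self.pubsub_management.create_stream_definition(
    name='ctd_parsed', parameter_dictionary_id=pdict_id)
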
Ejemplo n.º 42
0
def _make_coverage(path):
    tcrs = CRS([AxisTypeEnum.TIME])
    scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT, AxisTypeEnum.HEIGHT])

    tdom = GridDomain(GridShape('temporal', [0]), tcrs, MutabilityEnum.EXTENSIBLE)
    sdom = GridDomain(GridShape('spatial', [0]), scrs, MutabilityEnum.IMMUTABLE) # Dimensionality is excluded for now
        
    pdict = ParameterDictionary()
    t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.int64))
    t_ctxt.axis = AxisTypeEnum.TIME
    t_ctxt.uom = 'seconds since 1970-01-01'
    t_ctxt.fill_value = 0x0
    pdict.add_context(t_ctxt)
    
    lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float32))
    lat_ctxt.axis = AxisTypeEnum.LAT
    lat_ctxt.uom = 'degree_north'
    lat_ctxt.fill_value = 0e0
    pdict.add_context(lat_ctxt)

    lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float32))
    lon_ctxt.axis = AxisTypeEnum.LON
    lon_ctxt.uom = 'degree_east'
    lon_ctxt.fill_value = 0e0
    pdict.add_context(lon_ctxt)
    
    cat = {0: 'lemon', 1: 'apple', 2: 'banana', 99: 'None'}
    cat_ctxt = ParameterContext('category', param_type=CategoryType(categories=cat))
    cat_ctxt.long_name = "example of category"
    pdict.add_context(cat_ctxt)
    

    dens_ctxt = ParameterContext('quantity', param_type=QuantityType(value_encoding=np.float32))
    dens_ctxt.uom = 'unknown'
    dens_ctxt.fill_value = 0x0
    pdict.add_context(dens_ctxt)
    
    
    const_ctxt = ParameterContext('constant', param_type=ConstantType())
    const_ctxt.long_name = 'example of a parameter of type ConstantType'
    pdict.add_context(const_ctxt)
    
    rec_ctxt = ParameterContext('boolean', param_type=BooleanType())
    rec_ctxt.long_name = 'example of a parameter of type BooleanType'
    pdict.add_context(rec_ctxt)
    
    
    rec_ctxt = ParameterContext('range', param_type=ConstantRangeType())
    rec_ctxt.long_name = 'Range example'
    rec_ctxt.fill_value = 0x0
    pdict.add_context(rec_ctxt)
    
    rec_ctxt = ParameterContext('record', param_type=RecordType())
    rec_ctxt.long_name = 'example of a parameter of type RecordType, will be filled with dictionaries'
    rec_ctxt.fill_value = 0x0
    pdict.add_context(rec_ctxt)
    
    serial_ctxt = ParameterContext('array', param_type=ArrayType())
    serial_ctxt.uom = 'unknown'
    serial_ctxt.fill_value = 0x0
    pdict.add_context(serial_ctxt)
    
    guid = create_guid()
    guid = guid.replace("-", "")
    cov = SimplexCoverage(path, guid, name="sample_cov", parameter_dictionary=pdict,
                          temporal_domain=tdom, spatial_domain=sdom)
    
    return cov, path + os.sep + guid
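
A hedged usage sketch for _make_coverage: build a throwaway coverage in a temporary directory and unpack the (coverage, path) tuple it returns. The assumption that the coverage's backing files live under the returned <base>/<guid> path follows from how the path is assembled above.

import tempfile

cov, cov_path = _make_coverage(tempfile.mkdtemp())
# cov_path is <base>/<guid>; the SimplexCoverage built above is expected to
# persist itself under that directory.
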