Example No. 1
    def test_build_granule_and_load_from_granule_with_taxonomy(self):

        #Define a taxonomy and add sets. add_taxonomy_set takes one or more names and assigns them to one handle
        tx = TaxyTool()
        tx.add_taxonomy_set('temp', 'long_temp_name')
        tx.add_taxonomy_set('cond', 'long_cond_name')
        tx.add_taxonomy_set('pres', 'long_pres_name')
        tx.add_taxonomy_set('rdt')
        # map is {<local name>: <granule name or path>}

        #Use RecordDictionaryTool to create a record dictionary. Send in the taxonomy so the Tool knows what to expect
        rdt = RecordDictionaryTool(taxonomy=tx)

        #Create some arrays and fill them with random values
        temp_array = np.random.standard_normal(100)
        cond_array = np.random.standard_normal(100)
        pres_array = np.random.standard_normal(100)

        #Use the RecordDictionaryTool to add the values. This also would work if you used long_temp_name, etc.
        rdt['temp'] = temp_array
        rdt['cond'] = cond_array
        rdt['pres'] = pres_array

        #You can also add in another RecordDictionaryTool, provided the taxonomies are the same.
        rdt2 = RecordDictionaryTool(taxonomy=tx)
        rdt2['temp'] = temp_array
        rdt['rdt'] = rdt2


        g = build_granule(data_producer_id='john', taxonomy=tx, record_dictionary=rdt)

        l_tx = TaxyTool.load_from_granule(g)

        l_rd = RecordDictionaryTool.load_from_granule(g)

        # Make sure we got back the same Taxonomy Object
        self.assertEquals(l_tx._t, tx._t)
        self.assertEquals(l_tx.get_handles('temp'), tx.get_handles('temp'))
        self.assertEquals(l_tx.get_handles('cond'), tx.get_handles('cond'))


        # Now test the record dictionary object
        self.assertEquals(l_rd._rd, rdt._rd)
        self.assertEquals(l_rd._tx._t, rdt._tx._t)


        for k, v in l_rd.iteritems():
            self.assertIn(k, rdt)

            if isinstance(v, np.ndarray):
                self.assertTrue( (v == rdt[k]).all())

            else:
                self.assertEquals(v._rd, rdt[k]._rd)
Example No. 2
    def build_granule():
        tt = TaxyTool()
        tt.add_taxonomy_set('c')
        tt.add_taxonomy_set('t')
        tt.add_taxonomy_set('d')

        rdt = RecordDictionaryTool(taxonomy=tt)
        rdt['c'] = np.array([0,1])
        rdt['t'] = np.array([0,1])
        rdt['d'] = np.array([0,1])

        # 'bg' is presumably build_granule imported under an alias
        granule = bg(data_producer_id='test_identifier', taxonomy=tt, record_dictionary=rdt)
        return granule
Example No. 3
    def _get_taxy_tool(self, taxonomy):
        """
        helper to get the TaxyTool version of the given argument.

        @param taxonomy Either a TaxyTool, a Taxonomy, or some value that can
               be processed by TaxyTool.load to create the TaxyTool instance.
        """

        if isinstance(taxonomy, TaxyTool):
            tx = taxonomy
        elif isinstance(taxonomy, Taxonomy):
            tx = TaxyTool(taxonomy)
        else:
            tx = TaxyTool.load(taxonomy)

        return tx
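
A minimal usage sketch of the three branches above, assuming `obj` is an instance of the class providing this helper, and assuming (not confirmed here) that `TaxyTool.load` accepts the string produced by `TaxyTool.dump`:

from ion.services.dm.utility.granule.taxonomy import TaxyTool

tt = TaxyTool()
tt.add_taxonomy_set('temp', 'long name for temp')

assert obj._get_taxy_tool(tt) is tt       # a TaxyTool passes straight through
wrapped = obj._get_taxy_tool(tt._t)       # a Taxonomy (IonObject) gets wrapped
loaded = obj._get_taxy_tool(tt.dump())    # assumption: dump() output is loadable
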
Example No. 4
    def __init__(self, taxonomy=None, param_dictionary=None, shape=None):
        """
        @brief Initialize a new instance of Record Dictionary Tool with a taxonomy or parameter dictionary and an optional fixed shape
        @param taxonomy an instance of a TaxyTool or Taxonomy (IonObject) used in this record dictionary
        @param param_dictionary a ParameterDictionary from the coverage-model, used instead of a taxonomy
        @param shape an optional fixed shape (int or tuple) for the value sequences of this record dictionary
        """
        if not isinstance(shape, (_NoneType, int, tuple)):
            raise TypeError('Invalid shape argument, received type "%s"; should be None or int or tuple' % type(shape))


        self._rd = {}
        self._shp = shape

        if isinstance(self._shp, int):
            self._shp = (self._shp,)

        # hold onto the taxonomy - we need it to build the granule...

        #Eventually Taxonomy and TaxyTool will go away, to be replaced with ParameterDictionary from coverage-model
        #For now, keep Taxonomy stuff in until everyone can re-work their code.
        if param_dictionary:
            self._param_dict = param_dictionary
            self._tx = None
        elif isinstance(taxonomy, TaxyTool):
            self._tx = taxonomy
            self._param_dict = None
        elif isinstance(taxonomy, Taxonomy):
            self._tx = TaxyTool(taxonomy)
            self._param_dict = None
        else:
            raise TypeError('Invalid taxonomy argument, received type "%s"; should be ParameterDictionary, Taxonomy or TaxyTool' % type(taxonomy))
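
A short sketch of the constructor contract just described: an int shape is normalized to a 1-tuple, and an argument that is none of the accepted types raises TypeError.

from ion.services.dm.utility.granule.taxonomy import TaxyTool
from ion.services.dm.utility.granule.record_dictionary import RecordDictionaryTool

tt = TaxyTool()
tt.add_taxonomy_set('temp')

rdt = RecordDictionaryTool(taxonomy=tt, shape=10)
assert rdt._shp == (10,)                  # int shape becomes a 1-tuple

try:
    RecordDictionaryTool(taxonomy='bogus')
except TypeError:
    pass                                  # not a ParameterDictionary, Taxonomy or TaxyTool
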
Example No. 5
    def validate_mpl_graphs_transform_results(self, results):

        cc = self.container
        assertions = self.assertTrue

        # if it's just one granule, wrap it in a list so the following for loop covers both cases
        if isinstance(results, Granule):
            results = [results]

        for g in results:
            if isinstance(g,Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)

                graphs = get_safe(rdt, 'matplotlib_graphs')

                if graphs is None:
                    continue

                for graph in graphs[0]:

                    # At this point only dictionaries containing image data should be passed;
                    # for some reason non-dictionary values are filtering through.
                    if not isinstance(graph, dict):
                        continue

                    assertions(graph['viz_product_type'] == 'matplotlib_graphs' )
                    # check to see if the list (numpy array) contains actual images
                    assertions(imghdr.what(graph['image_name'], h = graph['image_obj']) == 'png')
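
The imghdr.what(filename, h=...) call above sniffs the image type from the in-memory bytes instead of reading the named file; a standalone sketch:

import imghdr

# the 8-byte PNG signature followed by filler; imghdr only inspects the header
png_bytes = '\x89PNG\r\n\x1a\n' + '\x00' * 16
assert imghdr.what('ignored_name.png', h=png_bytes) == 'png'
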
Example No. 6
    def validate_google_dt_transform_results(self, results):

        cc = self.container
        assertions = self.assertTrue

        # if it's just one granule, wrap it in a list so the following for loop covers both cases
        if isinstance(results, Granule):
            results = [results]

        for g in results:

            if isinstance(g,Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)

                gdt_data = get_safe(rdt, 'google_dt_components')

                # If this granule does not contain google dt components, skip it
                if gdt_data is None:
                    continue

                gdt = gdt_data[0]

                assertions(gdt['viz_product_type'] == 'google_dt' )
                assertions(len(gdt['data_description']) >= 0) # Need to come up with a better check
                assertions(len(gdt['data_content']) >= 0)
Example No. 7
    def test_combine_granule(self):
        tt = TaxyTool()
        tt.add_taxonomy_set('a')

        rdt = RecordDictionaryTool(tt)
        rdt['a'] = np.array([1,2,3])

        granule1 = build_granule('test',tt,rdt)

        rdt = RecordDictionaryTool(tt)
        rdt['a'] = np.array([4,5,6])
        
        granule2 = build_granule('test',tt,rdt)

        granule3 = combine_granules(granule1,granule2)

        rdt = RecordDictionaryTool.load_from_granule(granule3)

        self.assertTrue(np.allclose(rdt['a'],np.array([1,2,3,4,5,6])))
Example No. 8
def combine_granules(granule_a, granule_b):
    """
    This is a method that combines granules in a very naive manner
    """
    validate_is_instance(granule_a,Granule, 'granule_a is not a proper Granule')
    validate_is_instance(granule_b,Granule, 'granule_b is not a proper Granule')

    tt_a = TaxyTool.load_from_granule(granule_a)
    tt_b = TaxyTool.load_from_granule(granule_b)

    if tt_a != tt_b:
        raise BadRequest('Can\'t combine the two granules, they do not have the same taxonomy.')

    rdt_new = RecordDictionaryTool(tt_a)
    rdt_a = RecordDictionaryTool.load_from_granule(granule_a)
    rdt_b = RecordDictionaryTool.load_from_granule(granule_b)


    for k in rdt_a.iterkeys():
        rdt_new[k] = np.append(rdt_a[k], rdt_b[k])
    return build_granule(granule_a.data_producer_id, tt_a, rdt_new)
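
A sketch of the guard above: granules built from different taxonomies cannot be combined and raise BadRequest (illustrative names, using the same imports as the surrounding examples):

import numpy as np

tt_x = TaxyTool()
tt_x.add_taxonomy_set('a')
tt_y = TaxyTool()
tt_y.add_taxonomy_set('b')

rdt_x = RecordDictionaryTool(tt_x)
rdt_x['a'] = np.array([1, 2, 3])
rdt_y = RecordDictionaryTool(tt_y)
rdt_y['b'] = np.array([4, 5, 6])

g_x = build_granule('producer', tt_x, rdt_x)
g_y = build_granule('producer', tt_y, rdt_y)

try:
    combine_granules(g_x, g_y)
except BadRequest:
    pass  # taxonomies differ, exactly what the guard rejects
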
Example No. 9
    def _get_taxy_tool(self, taxonomy):
        """
        helper to get the TaxyTool version of the given argument.

        @param taxonomy Either a TaxyTool, a Taxonomy, or some value that can
               be processed by TaxyTool.load to create the TaxyTool instance.
        """

        if isinstance(taxonomy, TaxyTool):
            tx = taxonomy
        elif isinstance(taxonomy, Taxonomy):
            tx = TaxyTool(taxonomy)
        else:
            tx = TaxyTool.load(taxonomy)

        return tx
Example No. 10
    def setUp(self):

        self._tx = TaxyTool()
        self._tx.add_taxonomy_set('temp', 'long_temp_name')
        self._tx.add_taxonomy_set('cond', 'long_cond_name')
        self._tx.add_taxonomy_set('pres', 'long_pres_name')
        self._tx.add_taxonomy_set('rdt')
        self._tx.add_taxonomy_set('rdt2')
        # map is {<local name>: <granule name or path>}

        self._rdt = RecordDictionaryTool(taxonomy=self._tx)

        self._pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        self._pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        self._pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        self._pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        self._pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        self._pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pres', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        self._pdict.add_context(pres_ctxt)

        self._rdt_pdict = RecordDictionaryTool(param_dictionary=self._pdict)
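
Given this setup, a short sketch (e.g. inside a test method of the same class) of filling the ParameterDictionary-backed record dictionary; keys must match the ParameterContext names added above:

temp_vals = numpy.random.standard_normal(10).astype('float32')
self._rdt_pdict['temp'] = temp_vals       # resolved via ord_from_key('temp')
assert 'temp' in self._rdt_pdict
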
Example No. 11
from ion.services.dm.utility.granule.taxonomy import TaxyTool
from ion.services.dm.utility.granule.record_dictionary import RecordDictionaryTool
from pyon.util.containers import get_safe
from prototype.sci_data.stream_defs import SBE37_CDM_stream_definition, SBE37_RAW_stream_definition

from prototype.sci_data.stream_parser import PointSupplementStreamParser
from prototype.sci_data.constructor_apis import PointSupplementConstructor, RawSupplementConstructor

import StringIO
from numpy import array, append

# Matplotlib related imports
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure

tx = TaxyTool()
tx.add_taxonomy_set("matplotlib_graphs", "Matplotlib generated graphs for a particular data product")


class VizTransformMatplotlibGraphs(TransformFunction):

    """
    This class is used for instantiating worker processes that have subscriptions to data streams and convert
    incoming data from CDM format to Matplotlib graphs

    """

    # Need to extract the incoming stream def automatically
    outgoing_stream_def = SBE37_RAW_stream_definition()
    incoming_stream_def = SBE37_CDM_stream_definition()
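
The transform renders figures off-screen via the Agg backend imported above; a minimal sketch (an assumption about how such a transform might produce its PNG bytes, not code from this class) of turning a Figure into an in-memory PNG:

fig = Figure()
ax = fig.add_subplot(111)
ax.plot([0, 1, 2], [10, 20, 15])
canvas = FigureCanvas(fig)

buf = StringIO.StringIO()
canvas.print_png(buf)                     # write the figure as PNG into the buffer
image_bytes = buf.getvalue()
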
Example No. 12
class RecordDictionaryTool(object):
    """
    A granule is a unit of information which conveys part of a coverage. It is composed of a taxonomy and a nested
    structure of record dictionaries.

    The record dictionary is composed of named value sequences and other nested record dictionaries.

    The fact that all of the keys in the record dictionary are handles mapped by the taxonomy should never be exposed.
    The application user refers to everything in the record dictionary by the unique nick name from the taxonomy.

    """
    def __init__(self, taxonomy=None, param_dictionary=None, shape=None):
        """
        @brief Initialize a new instance of Record Dictionary Tool with a taxonomy or parameter dictionary and an optional fixed shape
        @param taxonomy an instance of a TaxyTool or Taxonomy (IonObject) used in this record dictionary
        @param param_dictionary a ParameterDictionary from the coverage-model, used instead of a taxonomy
        @param shape an optional fixed shape (int or tuple) for the value sequences of this record dictionary
        """
        if not isinstance(shape, (_NoneType, int, tuple)):
            raise TypeError('Invalid shape argument, received type "%s"; should be None or int or tuple' % type(shape))


        self._rd = {}
        self._shp = shape

        if isinstance(self._shp, int):
            self._shp = (self._shp,)

        # hold onto the taxonomy - we need it to build the granule...

        #Eventually Taxonomy and TaxyTool will go away, to be replaced with ParameterDictionary from coverage-model
        #For now, keep Taxonomy stuff in until everyone can re-work their code.
        if param_dictionary:
            self._param_dict = param_dictionary
            self._tx = None
        elif isinstance(taxonomy, TaxyTool):
            self._tx = taxonomy
            self._param_dict = None
        elif isinstance(taxonomy, Taxonomy):
            self._tx = TaxyTool(taxonomy)
            self._param_dict = None
        else:
            raise TypeError('Invalid taxonomy argument, received type "%s"; should be ParameterDictionary, Taxonomy or TaxyTool' % type(taxonomy))

    @classmethod
    def load_from_granule(cls, g):
        """
        @brief return an instance of Record Dictionary Tool from a granule. Used when a granule is received in a message
        """
        if g.param_dictionary:
            result = cls(param_dictionary=ParameterDictionary.load(g.param_dictionary))

        else:
            result = cls(TaxyTool(g.taxonomy))

        result._rd = g.record_dictionary
        if 0 in result._rd:
            result._shp = result._rd[0].shape
        return result

    def __setitem__(self, name, vals):
        """
        Set an item by nick name in the record dictionary
        """

        #This if block is used if RecordDictionaryTool was initialized using a ParameterDictionary.
        #This is the long term solution using the coverage-model.
        if self._param_dict:
            if isinstance(vals, RecordDictionaryTool):
                assert vals._param_dict == self._param_dict
                self._rd[self._param_dict.ord_from_key(param_name=name)] = vals._rd
            elif isinstance(vals, numpy.ndarray):
                #Otherwise it is a value sequence which should have the correct length

                # Matthew says: Assert only equal shape 5/17/12

                if vals.ndim == 0:
                    raise ValueError('The rank of a value sequence array in a record dictionary must be greater than zero. Got name "%s" with rank "%d"' % (name, vals.ndim))

                # Set _shp if it is None
                if self._shp is None:
                    self._shp = vals.shape

#                # Test new value sequence length
#                if self._shp != vals.shape:
#                    raise ValueError('Invalid array shape "%s" for name "%s"; Record dictionary defined shape is "%s"' % (vals.shape, name, self._shp))

                self._rd[self._param_dict.ord_from_key(param_name=name)] = vals

            else:

                raise TypeError('Invalid type "%s" in Record Dictionary Tool setitem with name "%s". Valid types are numpy.ndarray and RecordDictionaryTool' % (type(vals),name))

        #This if block is used if RecordDictionaryTool was initialized using a Taxonomy or TaxyTool
        #This block will eventually be removed.
        if self._tx:
            if isinstance(vals, RecordDictionaryTool):
                assert vals._tx == self._tx
                self._rd[self._tx.get_handle(name)] = vals._rd
            elif isinstance(vals, numpy.ndarray):
                #Otherwise it is a value sequence which should have the correct length

                # Matthew says: Assert only equal shape 5/17/12

                if vals.ndim == 0:
                    raise ValueError('The rank of a value sequence array in a record dictionary must be greater than zero. Got name "%s" with rank "%d"' % (name, vals.ndim))

                # Set _shp if it is None
                if self._shp is None:
                    self._shp = vals.shape

#                # Test new value sequence length
#                if self._shp != vals.shape:
#                    raise ValueError('Invalid array shape "%s" for name "%s"; Record dictionary defined shape is "%s"' % (vals.shape, name, self._shp))

                self._rd[self._tx.get_handle(name)] = vals

            else:

                raise TypeError('Invalid type "%s" in Record Dictionary Tool setitem with name "%s". Valid types are numpy.ndarray and RecordDictionaryTool' % (type(vals),name))

    def __getitem__(self, name):
        """
        Get an item by nick name from the record dictionary.
        """

        #This if block is used if RecordDictionaryTool was initialized using a ParameterDictionary.
        #This is the long term solution using the coverage-model.
        if self._param_dict:
            if isinstance(self._rd[self._param_dict.ord_from_key(param_name=name)], dict):
                result = RecordDictionaryTool(param_dictionary=self._param_dict)
                result._rd = self._rd[self._param_dict.ord_from_key(param_name=name)]
                return result
            else:
                return self._rd[self._param_dict.ord_from_key(param_name=name)]

        #This if block is used if RecordDictionaryTool was initialized using a Taxonomy or TaxyTool
        #This block will eventually be removed.
        if self._tx:
            if isinstance(self._rd[self._tx.get_handle(name)], dict):
                result = RecordDictionaryTool(taxonomy=self._tx)
                result._rd = self._rd[self._tx.get_handle(name)]
                return result
            else:
                return self._rd[self._tx.get_handle(name)]

    def iteritems(self):
        """ D.iteritems() -> an iterator over the (key, value) items of D """

        #This if block is used if RecordDictionaryTool was initialized using a ParameterDictionary.
        #This is the long term solution using the coverage-model.
        if self._param_dict:
            for k, v in self._rd.iteritems():
                if isinstance(v, dict):
                    result = RecordDictionaryTool(param_dictionary=self._param_dict)
                    result._rd = v
                    yield self._param_dict.key_from_ord(k), result
                else:
                    yield self._param_dict.key_from_ord(k), v

        #This if block is used if RecordDictionaryTool was initialized using a Taxonomy or TaxyTool
        #This block will eventually be removed.
        if self._tx:
            for k, v in self._rd.iteritems():
                if isinstance(v, dict):
                    result = RecordDictionaryTool(taxonomy=self._tx)
                    result._rd = v
                    yield self._tx.get_nick_name(k), result
                else:
                    yield self._tx.get_nick_name(k), v

    def iterkeys(self):
        """ D.iterkeys() -> an iterator over the keys of D """

        #This if block is used if RecordDictionaryTool was initialized using a ParameterDictionary.
        #This is the long term solution using the coverage-model.
        if self._param_dict:
            for k in self._rd.iterkeys():
                yield self._param_dict.key_from_ord(k)

        #This if block is used if RecordDictionaryTool was initialized using a Taxonomy or TaxyTool
        #This block will eventually be removed.
        if self._tx:
            for k in self._rd.iterkeys():
                yield self._tx.get_nick_name(k)

    def itervalues(self):
        """ D.itervalues() -> an iterator over the values of D """

        #This if block is used if RecordDictionaryTool was initialized using a ParameterDictionary.
        #This is the long term solution using the coverage-model.
        if self._param_dict:
            for v in self._rd.itervalues():
                if isinstance(v, dict):
                    result = RecordDictionaryTool(param_dictionary=self._param_dict)
                    result._rd = v
                    yield result
                else:
                    yield v

        #This if block is used if RecordDictionaryTool was initialized using a Taxonomy or TaxyTool
        #This block will eventually be removed.
        if self._tx:
            for v in self._rd.itervalues():
                if isinstance(v, dict):
                    result = RecordDictionaryTool(taxonomy=self._tx)
                    result._rd = v
                    yield result
                else:
                    yield v

    #TJG - This may need to be updated
    def update(self, E=None, **F):
        """
        @brief Dictionary update method exposed for Record Dictionaries
        @param E is another record dictionary
        @param F is a dictionary of nicknames and value sequences
        """
        if E:
            if hasattr(E, "keys"):
                for k in E:
                    self[k] = E[k]
            else:
                for k, v in E.iteritems():
                    self[k] = v

        if F:
            for k in F.keys():
                self[k] = F[k]

    def __contains__(self, nick_name):
        """ D.__contains__(k) -> True if D has a key k, else False """

        handle = ''
        try:
            if self._param_dict:
                handle = self._param_dict.ord_from_key(nick_name)

            if self._tx:
                handle = self._tx.get_handle(nick_name)
        except KeyError:
            # if the nick_name is not in the taxonomy, it is certainly not in the record dictionary
            return False

        return handle in self._rd

    def __delitem__(self, y):
        """ x.__delitem__(y) <==> del x[y] """
        #not sure if this is right, might just have to remove the name, not the whole handle
        if self._param_dict:
            del self._rd[self._param_dict.ord_from_key(y)]

        if self._tx:
            del self._rd[self._tx.get_handle(y)]
        #will probably need to delete the name from _tx

    def __iter__(self):
        """ x.__iter__() <==> iter(x) """
        return self.iterkeys()

    def __len__(self):
        """ x.__len__() <==> len(x) """
        return len(self._rd)

    def __repr__(self):
        """ x.__repr__() <==> repr(x) """
        result = "{"
        for k, v in self.iteritems():
            result += "\'{0}\': {1},".format(k, v)

        if len(result) > 1:
            result = result[:-1] + "}"

        return result

    def __str__(self):
        result = "{"
        for k, v in self.iteritems():
            result += "\'{0}\': {1},".format(k, v)

        if len(result) > 1:
            result = result[:-1] + "}"

        return result

    __hash__ = None

    def pretty_print(self):
        """
        @brief Pretty Print the record dictionary for debug or log purposes.
        """
        pass

#        fid = StringIO.StringIO()
#        # Use string IO inside a try block in case of exceptions or a large return value.
#        try:
#            fid.write('Start Pretty Print Record Dictionary:\n')
#            self._pprint(fid,offset='')
#            fid.write('End of Pretty Print')
#        except Exception, ex:
#            log.exception('Unexpected Exception in Pretty Print Wrapper!')
#            fid.write('Exception! %s' % ex)
#
#        finally:
#            result = fid.getvalue()
#            fid.close()
#
#
#        return result

    def _pprint(self, fid, offset=None):
        pass
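
A short sketch of the iteration protocol defined above (taxonomy path): nested record dictionaries come back re-wrapped as RecordDictionaryTool instances, and every key is a nick name, never a raw handle.

import numpy

tt = TaxyTool()
tt.add_taxonomy_set('data')
tt.add_taxonomy_set('inner')

outer = RecordDictionaryTool(taxonomy=tt)
inner = RecordDictionaryTool(taxonomy=tt)
inner['data'] = numpy.arange(5)
outer['inner'] = inner

for name, value in outer.iteritems():
    assert name == 'inner'                            # nick name, not a handle
    assert isinstance(value, RecordDictionaryTool)    # dict values are re-wrapped
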
Example No. 13
import random
import math

from prototype.sci_data.stream_defs import ctd_stream_packet, SBE37_CDM_stream_definition, ctd_stream_definition
from prototype.sci_data.constructor_apis import PointSupplementConstructor

from interface.services.dm.ipubsub_management_service import PubsubManagementServiceClient
from ion.processes.data.ctd_stream_publisher import SimpleCtdPublisher
from ion.services.dm.utility.granule.record_dictionary import RecordDictionaryTool
from ion.services.dm.utility.granule.taxonomy import TaxyTool
from ion.services.dm.utility.granule.granule import build_granule
import numpy


### Taxonomies are defined beforehand, out of band... somehow.
tx = TaxyTool()
tx.add_taxonomy_set('temp','long name for temp')
tx.add_taxonomy_set('cond','long name for cond')
tx.add_taxonomy_set('lat','long name for latitude')
tx.add_taxonomy_set('lon','long name for longitude')
tx.add_taxonomy_set('pres','long name for pres')
tx.add_taxonomy_set('time','long name for time')
tx.add_taxonomy_set('height','long name for height')



class SinusoidalCtdPublisher(SimpleCtdPublisher):
    def __init__(self, *args, **kwargs):
        super(SinusoidalCtdPublisher, self).__init__(*args,**kwargs)
        #@todo Init stuff
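
A sketch of what this publisher's publish step might build from the module-level taxonomy (the sinusoid and producer id are illustrative; build_granule, numpy and math are imported above):

t = numpy.arange(0, 100)
temperature = numpy.array([10 + 5 * math.sin(x / 10.0) for x in t])

rdt = RecordDictionaryTool(taxonomy=tx)
rdt['time'] = t
rdt['temp'] = temperature
granule = build_granule(data_producer_id='sinusoidal_ctd',
                        taxonomy=tx, record_dictionary=rdt)
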
Example No. 14
def get_taxonomy(stream_name):
    """
    Gets the taxonomy for a given stream.

    @param stream_name Stream name.

    @retval corresponding taxonomy.
    """

    if stream_name in _cache:
        log.debug("taxonomy for stream_name=%s found in cache" % stream_name)
        return _cache[stream_name]

    taxy = None

    # TODO determine and use appropriate mechanism for definition of
    # taxonomies. The following hard-coded definitions are taken from
    # preliminary code in instrument_management_service.py as of 7/15/12.

    if "parsed" == stream_name:
        taxy = TaxyTool()
        taxy.add_taxonomy_set('temp', 't', 'long name for temp')
        taxy.add_taxonomy_set('cond', 'c', 'long name for cond')
        taxy.add_taxonomy_set('pres', 'p', 'long name for pres')
        taxy.add_taxonomy_set('lat','long name for latitude')
        taxy.add_taxonomy_set('lon','long name for longitude')
        taxy.add_taxonomy_set('time','long name for time')
        taxy.add_taxonomy_set('height','long name for height')
        # This is an example of using groups it is not a normative statement about how to use groups
        taxy.add_taxonomy_set('coordinates','This group contains coordinates...')
        taxy.add_taxonomy_set('data','This group contains data...')

    elif "raw" == stream_name:
        taxy = TaxyTool()
        taxy.add_taxonomy_set('blob','bytes in an array')
        taxy.add_taxonomy_set('lat','long name for latitude')
        taxy.add_taxonomy_set('lon','long name for longitude')
        taxy.add_taxonomy_set('time','long name for time')
        taxy.add_taxonomy_set('height','long name for height')
        taxy.add_taxonomy_set('coordinates','This group contains coordinates...')
        taxy.add_taxonomy_set('data','This group contains data...')

    else:
        log.warn("Undefined taxonomy for stream_name=%s" % stream_name)

    if taxy:
        _cache[stream_name] = taxy
        log.debug("created taxonomy for stream_name=%s " % stream_name)

    return taxy
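
The function assumes a module-level cache; a minimal sketch of that surrounding state and the caching behavior (the `_cache` definition is an assumption consistent with the code):

_cache = {}   # stream_name -> TaxyTool, filled on first request

parsed_tx = get_taxonomy('parsed')
assert get_taxonomy('parsed') is parsed_tx   # second call is served from the cache
assert get_taxonomy('bogus') is None         # unknown names only log a warning
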
Example No. 15
    def setUp(self):
        """
        Setup test cases.
        """
        parsed_tx = TaxyTool()
        parsed_tx.add_taxonomy_set('temp', 'long name for temp', 't')
        parsed_tx.add_taxonomy_set('cond', 'long name for cond', 'c')
        parsed_tx.add_taxonomy_set('pres', 'long name for pres', 'd')
        parsed_tx.add_taxonomy_set('lat', 'long name for latitude', 'lt')
        parsed_tx.add_taxonomy_set('lon', 'long name for longitude', 'ln')
        parsed_tx.add_taxonomy_set('time', 'long name for time', 'tm')

        # This is an example of using groups it is not a normative statement about how to use groups
        parsed_tx.add_taxonomy_set('coordinates',
                                   'This group contains coordinates...')
        parsed_tx.add_taxonomy_set('data', 'This group contains data...')

        self._parsed_taxonomy = parsed_tx

        raw_tx = TaxyTool()
        raw_tx.add_taxonomy_set('raw_fixed',
                                'Fixed length bytes in an array of records')
        raw_tx.add_taxonomy_set('raw_blob',
                                'Unlimited length bytes in an array')

        self._raw_taxonomy = raw_tx

        log.debug("_parsed_taxonomy = %s" % self._parsed_taxonomy.dump())
        log.debug("_raw_taxonomy = %s" % self._raw_taxonomy.dump())

        self.packet_factory = PacketFactory.get_packet_factory(
            PacketFactoryType.R2LCAFormat)
        self.assertIsInstance(self.packet_factory, LCAPacketFactory)
Example No. 16
    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datsetModel_obj = IonObject(
            RT.ExternalDatasetModel,
            name="ExampleDatasetModel",
            description="ExampleDatasetModel",
            datset_type="FibSeries",
        )
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(datsetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" % ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s", str(datasetModel_id))

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(
            RT.ExternalDatasetAgent,
            name="datasetagent007",
            description="datasetagent007",
            handler_module=EDA_MOD,
            handler_class=EDA_CLS,
        )
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" % ex)
        log.debug("TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s", str(datasetAgent_id))

        # Create ExternalDataset
        log.debug("TestExternalDatasetAgentMgmt: Create external dataset resource ")
        extDataset_obj = IonObject(RT.ExternalDataset, name="ExtDataset", description="ExtDataset")
        try:
            extDataset_id = self.damsclient.create_external_dataset(extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" % ex)

        log.debug("TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ", str(extDataset_id))

        # register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(extDataset_id)

        # create a stream definition for the data from the ctd simulator

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name("ctd_parsed_param_dict", id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name="SBE37_CDM", parameter_dictionary_id=pdict_id)

        log.debug("TestExternalDatasetAgentMgmt: new Stream Definition id = %s", str(ctd_stream_def_id))

        log.debug("TestExternalDatasetAgentMgmt: Creating new data product with a stream definition")
        dp_obj = IonObject(RT.DataProduct, name="eoi dataset data", description=" stream test")

        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(
            RT.DataProduct, name="DP1", description="some new dp", temporal_domain=tdom, spatial_domain=sdom
        )

        data_product_id1 = self.dpclient.create_data_product(dp_obj, ctd_stream_def_id)

        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s", str(data_product_id1))

        self.damsclient.assign_data_product(input_resource_id=extDataset_id, data_product_id=data_product_id1)

        # todo fix the problem here....
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s", str(stream_ids))
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set("data", "external_data")

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG["dh_cfg"] = {
            "TESTING": True,
            "stream_id": stream_id,  # TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
            "data_producer_id": dproducer_id,
            #            'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id
            "taxonomy": tx.dump(),  # TODO: Currently does not support sets
            "max_records": 4,
        }

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            "driver_config": self.DVR_CONFIG,
            "stream_config": self._stream_config,
            "agent": {"resource_id": EDA_RESOURCE_ID},
            "test_mode": True,
        }

        extDatasetAgentInstance_obj = IonObject(
            RT.ExternalDatasetAgentInstance,
            name="DatasetAgentInstance",
            description="DatasetAgentInstance",
            dataset_driver_config=self.DVR_CONFIG,
            dataset_agent_config=agent_config,
        )
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(
            external_dataset_agent_instance=extDatasetAgentInstance_obj,
            external_dataset_agent_id=datasetAgent_id,
            external_dataset_id=extDataset_id,
        )
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(extDatasetAgentInstance_obj))
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s", str(extDatasetAgentInstance_id))

        # Check that the instance is currently not active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ", str(id), str(active)
        )

        self.damsclient.start_external_dataset_agent_instance(extDatasetAgentInstance_id)

        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(extDatasetAgentInstance_id)
        log.debug("TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s", str(dataset_agent_instance_obj))

        # now the instance process should be active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ", str(id), str(active)
        )

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id, process=FakeProcess())
        print "TestExternalDatasetAgentMgmt: got ia client %s", self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset client %s", str(self._dsa_client))

        #        cmd=AgentCommand(command='initialize')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='go_active')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='run')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        log.info('Send an unconstrained request for data (\'new data\')')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        cmd = AgentCommand(command='reset')
        #        _ = self._dsa_client.execute_agent(cmd)
        #        cmd = AgentCommand(command='get_current_state')
        #        retval = self._dsa_client.execute_agent(cmd)
        #        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command="initialize")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command="go_active")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="pause")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command="resume")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="clear")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="pause")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command="clear")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command="run")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command="reset")
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command="get_current_state")
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        # -------------------------------
        # Deactivate InstrumentAgentInstance
        # -------------------------------
        self.damsclient.stop_external_dataset_agent_instance(extDatasetAgentInstance_id)
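
The command/state ladder above repeats one pattern; a hypothetical helper (the name and shape are mine, not part of this test) that would express each step in one line:

def assert_transition(dsa_client, command, expected_state):
    # issue the command, then read back the agent's FSM state and compare
    dsa_client.execute_agent(AgentCommand(command=command))
    retval = dsa_client.execute_agent(AgentCommand(command="get_current_state"))
    assert retval.result == expected_state

# e.g. assert_transition(self._dsa_client, "run", InstrumentAgentState.OBSERVATORY)
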
Example No. 17
from pyon.core.exception import BadRequest
from pyon.public import IonObject, RT, log

from datetime import datetime
import numpy
from ion.services.dm.utility.granule.granule import build_granule
from ion.services.dm.utility.granule.taxonomy import TaxyTool
from ion.services.dm.utility.granule.record_dictionary import RecordDictionaryTool
from prototype.sci_data.stream_defs import SBE37_CDM_stream_definition, SBE37_RAW_stream_definition

from prototype.sci_data.stream_parser import PointSupplementStreamParser
from prototype.sci_data.constructor_apis import PointSupplementConstructor, RawSupplementConstructor

from pyon.util.containers import get_safe

tx = TaxyTool()
tx.add_taxonomy_set('google_dt_components','Google datatable')


class VizTransformGoogleDT(TransformFunction):

    """
    This class is used for converting incoming data from CDM format to JSON style Google DataTables

    Note: One behaviour this class is specifically expected to achieve is to determine whether it is supposed
        to work as a realtime transform (exists indefinitely and maintains a sliding window of data) or as
        a replay transform (one-shot).

        [2] This transform behaves as an instantaneous forwarder. There is no waiting for the entire stream
            to create the complete datatable. As the granules come in, they are translated to the datatable
            'components'. Components, because we are not creating the actual datatable in this code. That's the job
Example No. 18
    def setUp(self):
        """
        Setup test cases.
        """
        parsed_tx = TaxyTool()
        parsed_tx.add_taxonomy_set('temp','long name for temp', 't')
        parsed_tx.add_taxonomy_set('cond','long name for cond', 'c')
        parsed_tx.add_taxonomy_set('pres','long name for pres', 'd')
        parsed_tx.add_taxonomy_set('lat','long name for latitude', 'lt')
        parsed_tx.add_taxonomy_set('lon','long name for longitude', 'ln')
        parsed_tx.add_taxonomy_set('time','long name for time', 'tm')

        # This is an example of using groups it is not a normative statement about how to use groups
        parsed_tx.add_taxonomy_set('coordinates','This group contains coordinates...')
        parsed_tx.add_taxonomy_set('data','This group contains data...')

        self._parsed_taxonomy = parsed_tx

        raw_tx = TaxyTool()
        raw_tx.add_taxonomy_set('raw_fixed','Fixed length bytes in an array of records')
        raw_tx.add_taxonomy_set('raw_blob','Unlimited length bytes in an array')

        self._raw_taxonomy = raw_tx

        log.debug("_parsed_taxonomy = %s" % self._parsed_taxonomy.dump())
        log.debug("_raw_taxonomy = %s" % self._raw_taxonomy.dump())

        self.packet_factory = PacketFactory.get_packet_factory( PacketFactoryType.R2LCAFormat)
        self.assertIsInstance(self.packet_factory, LCAPacketFactory)
Example No. 19
    def test_activateDatasetAgent(self):

        # Create ExternalDatasetModel
        datsetModel_obj = IonObject(RT.ExternalDatasetModel,
                                    name='ExampleDatasetModel',
                                    description="ExampleDatasetModel",
                                    datset_type="FibSeries")
        try:
            datasetModel_id = self.damsclient.create_external_dataset_model(
                datsetModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetModel: %s" % ex)
        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDatasetModel id = %s",
            str(datasetModel_id))

        # Create ExternalDatasetAgent
        datasetAgent_obj = IonObject(RT.ExternalDatasetAgent,
                                     name='datasetagent007',
                                     description="datasetagent007",
                                     handler_module=EDA_MOD,
                                     handler_class=EDA_CLS)
        try:
            datasetAgent_id = self.damsclient.create_external_dataset_agent(
                datasetAgent_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new ExternalDatasetAgent: %s" % ex)
        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDatasetAgent id = %s",
            str(datasetAgent_id))

        # Create ExternalDataset
        log.debug(
            'TestExternalDatasetAgentMgmt: Create external dataset resource ')
        extDataset_obj = IonObject(RT.ExternalDataset,
                                   name='ExtDataset',
                                   description="ExtDataset")
        try:
            extDataset_id = self.damsclient.create_external_dataset(
                extDataset_obj, datasetModel_id)
        except BadRequest as ex:
            self.fail("failed to create new external dataset resource: %s" %
                      ex)

        log.debug(
            "TestExternalDatasetAgentMgmt: new ExternalDataset id = %s  ",
            str(extDataset_id))

        #register the dataset as a data producer
        dproducer_id = self.damsclient.register_external_data_set(
            extDataset_id)

        # create a stream definition for the data from the ctd simulator

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        log.debug(
            "TestExternalDatasetAgentMgmt: new Stream Definition id = %s",
            str(ctd_stream_def_id))

        log.debug(
            "TestExternalDatasetAgentMgmt: Creating new data product with a stream definition"
        )
        dp_obj = IonObject(RT.DataProduct,
                           name='eoi dataset data',
                           description=' stream test')

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')

        data_product_id1 = self.dpclient.create_data_product(
            dp_obj, ctd_stream_def_id)

        log.debug("TestExternalDatasetAgentMgmt: new dp_id = %s",
                  str(data_product_id1))

        self.damsclient.assign_data_product(input_resource_id=extDataset_id,
                                            data_product_id=data_product_id1)

        #todo fix the problem here....
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id1,
                                                   PRED.hasStream, None, True)
        log.debug("TestExternalDatasetAgentMgmt: Data product streams1 = %s",
                  str(stream_ids))
        stream_id = stream_ids[0]

        # Build a taxonomy for the dataset
        tx = TaxyTool()
        tx.add_taxonomy_set('data', 'external_data')

        # Augment the DVR_CONFIG with the necessary pieces
        self.DVR_CONFIG['dh_cfg'] = {
            'TESTING': True,
            'stream_id':
            stream_id,  #TODO: This should probably be a 'stream_config' dict with stream_name:stream_id members
            'data_producer_id': dproducer_id,
            #            'external_dataset_res':extDataset_obj, # Not needed - retrieved by EDA based on resource_id
            'taxonomy': tx.dump(),  #TODO: Currently does not support sets
            'max_records': 4,
        }

        # Create agent config.
        self._stream_config = {}
        agent_config = {
            'driver_config': self.DVR_CONFIG,
            'stream_config': self._stream_config,
            'agent': {
                'resource_id': EDA_RESOURCE_ID
            },
            'test_mode': True
        }

        extDatasetAgentInstance_obj = IonObject(
            RT.ExternalDatasetAgentInstance,
            name='DatasetAgentInstance',
            description="DatasetAgentInstance",
            dataset_driver_config=self.DVR_CONFIG,
            dataset_agent_config=agent_config)
        extDatasetAgentInstance_id = self.damsclient.create_external_dataset_agent_instance(
            external_dataset_agent_instance=extDatasetAgentInstance_obj,
            external_dataset_agent_id=datasetAgent_id,
            external_dataset_id=extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s",
            str(extDatasetAgentInstance_obj))
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s",
            str(extDatasetAgentInstance_id))

        #Check that the instance is currently not active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(
            extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 1 = %s ",
            str(id), str(active))

        self.damsclient.start_external_dataset_agent_instance(
            extDatasetAgentInstance_id)

        dataset_agent_instance_obj = self.damsclient.read_external_dataset_agent_instance(
            extDatasetAgentInstance_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance obj: = %s",
            str(dataset_agent_instance_obj))

        # now the instance process should be active
        id, active = self.damsclient.retrieve_external_dataset_agent_instance(
            extDataset_id)
        log.debug(
            "TestExternalDatasetAgentMgmt: Dataset agent instance id: = %s    active 2 = %s ",
            str(id), str(active))

        # Start a resource agent client to talk with the instrument agent.
        self._dsa_client = ResourceAgentClient(extDataset_id,
                                               process=FakeProcess())
        print 'TestExternalDatasetAgentMgmt: got ia client %s' % self._dsa_client
        log.debug("TestExternalDatasetAgentMgmt: got dataset client %s",
                  str(self._dsa_client))

        #        cmd=AgentCommand(command='initialize')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='go_active')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        cmd = AgentCommand(command='run')
        #        _ = self._dsa_client.execute_agent(cmd)
        #
        #        log.info('Send an unconstrained request for data (\'new data\')')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        log.info('Send a second unconstrained request for data (\'new data\'), should be rejected')
        #        cmd = AgentCommand(command='acquire_data')
        #        self._dsa_client.execute(cmd)
        #
        #        cmd = AgentCommand(command='reset')
        #        _ = self._dsa_client.execute_agent(cmd)
        #        cmd = AgentCommand(command='get_current_state')
        #        retval = self._dsa_client.execute_agent(cmd)
        #        state = retval.result

        # TODO: Think about what we really should be testing at this point
        # The following is taken from ion.agents.data.test.test_external_dataset_agent.ExternalDatasetAgentTestBase.test_states()
        # TODO: Do we also need to show data retrieval?
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        cmd = AgentCommand(command='initialize')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='resume')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='pause')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.STOPPED)

        cmd = AgentCommand(command='clear')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)

        cmd = AgentCommand(command='run')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)

        cmd = AgentCommand(command='reset')
        retval = self._dsa_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._dsa_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)

        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.damsclient.stop_external_dataset_agent_instance(
            extDatasetAgentInstance_id)
Example No. 20
    def setUp(self):
        """
        Setup test cases.
        """
        parsed_tx = TaxyTool()
        parsed_tx.add_taxonomy_set("temp", "long name for temp", "t")
        parsed_tx.add_taxonomy_set("cond", "long name for cond", "c")
        parsed_tx.add_taxonomy_set("pres", "long name for pres", "d")
        parsed_tx.add_taxonomy_set("lat", "long name for latitude", "lt")
        parsed_tx.add_taxonomy_set("lon", "long name for longitude", "ln")
        parsed_tx.add_taxonomy_set("time", "long name for time", "tm")

        # This is an example of using groups it is not a normative statement about how to use groups
        parsed_tx.add_taxonomy_set("coordinates", "This group contains coordinates...")
        parsed_tx.add_taxonomy_set("data", "This group contains data...")

        self._parsed_taxonomy = parsed_tx

        raw_tx = TaxyTool()
        raw_tx.add_taxonomy_set("raw_fixed", "Fixed length bytes in an array of records")
        raw_tx.add_taxonomy_set("raw_blob", "Unlimited length bytes in an array")

        self._raw_taxonomy = raw_tx

        log.debug("_parsed_taxonomy = %s" % self._parsed_taxonomy.dump())
        log.debug("_raw_taxonomy = %s" % self._raw_taxonomy.dump())

        self.packet_factory = PacketFactory.get_packet_factory(PacketFactoryType.R2LCAFormat)
        self.assertIsInstance(self.packet_factory, LCAPacketFactory)
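
    # Illustrative sketch (not part of the original test case): add_taxonomy_set
    # registers every name it is given against a single handle, so a nickname
    # and its long name should resolve to the same handle set.
    def _check_alias_handles(self):
        tx = self._parsed_taxonomy
        self.assertEqual(tx.get_handles('temp'),
                         tx.get_handles('long name for temp'))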
Example #21
def get_taxonomy(stream_name):
    """
    Gets the taxonomy for a given stream.

    @param stream_name Stream name.

    @retval corresponding taxonomy.
    """

    if stream_name in _cache:
        log.debug("taxonomy for stream_name=%s found in cache" % stream_name)
        return _cache[stream_name]

    taxy = None

    # TODO determine and use appropriate mechanism for definition of
    # taxonomies. The following hard-coded definitions are taken from
    # preliminary code in instrument_management_service.py as of 7/15/12.

    if "parsed" == stream_name:
        taxy = TaxyTool()
        taxy.add_taxonomy_set('temp', 't', 'long name for temp')
        taxy.add_taxonomy_set('cond', 'c', 'long name for cond')
        taxy.add_taxonomy_set('pres', 'p', 'long name for pres')
        taxy.add_taxonomy_set('lat', 'long name for latitude')
        taxy.add_taxonomy_set('lon', 'long name for longitude')
        taxy.add_taxonomy_set('time', 'long name for time')
        taxy.add_taxonomy_set('height', 'long name for height')
        # This is an example of using groups; it is not a normative statement about how to use groups.
        taxy.add_taxonomy_set('coordinates',
                              'This group contains coordinates...')
        taxy.add_taxonomy_set('data', 'This group contains data...')

    elif "raw" == stream_name:
        taxy = TaxyTool()
        taxy.add_taxonomy_set('blob', 'bytes in an array')
        taxy.add_taxonomy_set('lat', 'long name for latitude')
        taxy.add_taxonomy_set('lon', 'long name for longitude')
        taxy.add_taxonomy_set('time', 'long name for time')
        taxy.add_taxonomy_set('height', 'long name for height')
        taxy.add_taxonomy_set('coordinates',
                              'This group contains coordinates...')
        taxy.add_taxonomy_set('data', 'This group contains data...')

    else:
        log.warn("Undefined taxonomy for stream_name=%s" % stream_name)

    if taxy:
        _cache[stream_name] = taxy
        log.debug("created taxonomy for stream_name=%s " % stream_name)

    return taxy
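

# Brief usage sketch (illustrative, not part of the original module; relies on
# the module-level _cache used above): the first call builds and caches the
# TaxyTool, repeat calls return the cached instance, and unknown names yield None.
if __name__ == '__main__':
    parsed = get_taxonomy('parsed')
    assert parsed is get_taxonomy('parsed')      # cache hit: same instance
    assert get_taxonomy('bogus_stream') is None  # undefined stream name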

import numpy
import gevent

from prototype.sci_data.stream_defs import ctd_stream_packet, SBE37_CDM_stream_definition, ctd_stream_definition
from prototype.sci_data.constructor_apis import PointSupplementConstructor

from interface.services.dm.ipubsub_management_service import PubsubManagementServiceClient
### For new granule and stream interface
from pyon.ion.transforma import TransformStreamPublisher
from ion.services.dm.utility.granule.record_dictionary import RecordDictionaryTool
from ion.services.dm.utility.granule.taxonomy import TaxyTool
from ion.services.dm.utility.granule.granule import build_granule


### Taxonomies are defined beforehand, out of band... somehow.
tx = TaxyTool()
tx.add_taxonomy_set('temp','long name for temp')
tx.add_taxonomy_set('cond','long name for cond')
tx.add_taxonomy_set('lat','long name for latitude')
tx.add_taxonomy_set('lon','long name for longitude')
tx.add_taxonomy_set('pres','long name for pres')
tx.add_taxonomy_set('time','long name for time')
tx.add_taxonomy_set('height','long name for height')
tx.add_taxonomy_set('coordinates','This group contains coordinates...')
tx.add_taxonomy_set('data','This group contains data...')


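# Illustrative sketch (an assumption, not part of the original publisher): the
# module-level taxonomy above can be paired with a RecordDictionaryTool to
# assemble a granule that a publisher process could send on its stream.
def _example_granule(data_producer_id='example_ctd_producer'):
    rdt = RecordDictionaryTool(taxonomy=tx)
    rdt['time'] = numpy.arange(10)                  # fabricated sample values
    rdt['temp'] = numpy.random.standard_normal(10)
    rdt['cond'] = numpy.random.standard_normal(10)
    return build_granule(data_producer_id=data_producer_id,
                         taxonomy=tx,
                         record_dictionary=rdt)

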
class SimpleCtdPublisher(TransformStreamPublisher):
    def on_start(self):
        exchange_point = self.CFG.get_safe('process.exchange_point', 'science_data')
        self.CFG.process.exchange_point = exchange_point

    def test_createTransformsThenActivateInstrument(self):

        self.loggerpids = []

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel", model="SBE37IMModel" )
        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" %ex)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="ion.agents.instrument.instrument_agent", driver_class="InstrumentAgent" )
        try:
            instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentAgent: %s" %ex)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_createTransformsThenActivateInstrument: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241)')
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        try:
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
            self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentDevice: %s" %ex)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s    (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance",
                                          driver_module='mi.instrument.seabird.sbe37smb.ooicore.driver', driver_class='SBE37Driver',
                                          comms_device_address='sbe37-simulator.oceanobservatories.org',   comms_device_port=4001,  port_agent_work_dir='/tmp/', port_agent_delimeter=['<<','>>'] )
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)

        # create a stream definition for the data from the ctd simulator
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=ctd_stream_def)

        log.debug( 'new Stream Definition id = %s', ctd_stream_def_id)

        log.debug( 'Creating new CDM data product with a stream definition')

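        # Use CoverageCraft to build spatial/temporal domains and a parameter
        # dictionary, then serialize (dump) them for the IonObject payloads below.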
        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
        log.debug( 'new dp_id = %s', ctd_parsed_data_product)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        pid = self.create_logger('ctd_parsed', stream_ids[0] )
        self.loggerpids.append(pid)
 
        print 'test_createTransformsThenActivateInstrument: Data product streams1 = ', stream_ids

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        log.debug( 'Creating new RAW data product with a stream definition')
        raw_stream_def = SBE37_RAW_stream_definition()
        raw_stream_def_id = self.pubsubclient.create_stream_definition(container=raw_stream_def)

        dp_obj = IonObject(RT.DataProduct,
            name='the raw data',
            description='raw stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id, parameter_dictionary)
        log.debug( 'new dp_id = %s', str(ctd_raw_data_product))

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)
        log.debug( 'Data product streams2 = %s', str(stream_ids))

        #todo: attaching the taxonomy to the stream is a TEMPORARY measure
        # Create taxonomies for both parsed and attach to the stream
        RawTax = TaxyTool()
        RawTax.add_taxonomy_set('raw_fixed','Fixed length bytes in an array of records')
        RawTax.add_taxonomy_set('raw_blob','Unlimited length bytes in an array')

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create data process definition ctd_L0_all")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all',
                            process_source='some_source_reference')
        try:
            ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new ctd_L0_all data process definition: %s" %ex)


        #-------------------------------
        # L1 Conductivity: Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1ConductivityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_conductivity',
                            description='create the L1 conductivity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_conductivity',
                            class_name='CTDL1ConductivityTransform',
                            process_source='CTDL1ConductivityTransform source code here...')
        try:
            ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" %ex)

        #-------------------------------
        # L1 Pressure: Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1PressureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_pressure',
                            description='create the L1 pressure data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_pressure',
                            class_name='CTDL1PressureTransform',
                            process_source='CTDL1PressureTransform source code here...')
        try:
            ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1PressureTransform data process definition: %s" %ex)


        #-------------------------------
        # L1 Temperature: Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create data process definition CTDL1TemperatureTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L1_temperature',
                            description='create the L1 temperature data product',
                            module='ion.processes.data.transforms.ctd.ctd_L1_temperature',
                            class_name='CTDL1TemperatureTransform',
                            process_source='CTDL1TemperatureTransform source code here...')
        try:
            ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Salinity: Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create data process definition SalinityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_salinity',
                            description='create the L2 salinity data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
                            class_name='SalinityTransform',
                            process_source='SalinityTransform source code here...')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new SalinityTransform data process definition: %s" %ex)


        #-------------------------------
        # L2 Density: Data Process Definition
        #-------------------------------
        log.debug("TestIntDataProcessMgmtServiceMultiOut: create data process definition DensityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L2_density',
                            description='create the L2 density data product',
                            module='ion.processes.data.transforms.ctd.ctd_L2_density',
                            class_name='DensityTransform',
                            process_source='DensityTransform source code here...')
        try:
            ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except BadRequest as ex:
            self.fail("failed to create new DensityTransform data process definition: %s" %ex)





        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity = L0_conductivity_stream_definition()
        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_conductivity, name='L0_Conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id )

        outgoing_stream_l0_pressure = L0_pressure_stream_definition()
        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_pressure, name='L0_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id )

        outgoing_stream_l0_temperature = L0_temperature_stream_definition()
        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l0_temperature, name='L0_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id )


        self.output_products={}
        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 conductivity")

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                    outgoing_stream_l0_conductivity_id,
                                                                                    parameter_dictionary)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_conductivity_output_dp_id)


        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 pressure")

        ctd_l0_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L0_Pressure',
                                                    description='transform output pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id,
                                                                                    parameter_dictionary)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_pressure_output_dp_id)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L0 temperature")

        ctd_l0_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L0_Temperature',
                                                        description='transform output temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id,
                                                                                    parameter_dictionary)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l0_temperature_output_dp_id)


        #-------------------------------
        # L1 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l1_conductivity = L1_conductivity_stream_definition()
        outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l1_conductivity, name='L1_conductivity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id )

        outgoing_stream_l1_pressure = L1_pressure_stream_definition()
        outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l1_pressure, name='L1_Pressure')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id )

        outgoing_stream_l1_temperature = L1_temperature_stream_definition()
        outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l1_temperature, name='L1_Temperature')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id )

        log.debug("test_createTransformsThenActivateInstrument: create output data product L1 conductivity")

        ctd_l1_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L1_Conductivity',
                                                        description='transform output L1 conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l1_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l1_conductivity_output_dp_obj,
                                                                                        outgoing_stream_l1_conductivity_id,
                                                                                        parameter_dictionary)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l1_conductivity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l1_conductivity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_conductivity', stream_ids[0] )
        self.loggerpids.append(pid)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L1 pressure")

        ctd_l1_pressure_output_dp_obj = IonObject(  RT.DataProduct,
                                                    name='L1_Pressure',
                                                    description='transform output L1 pressure',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l1_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l1_pressure_output_dp_obj,
                                                                                outgoing_stream_l1_pressure_id,
                                                                                parameter_dictionary)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l1_pressure_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l1_pressure_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_pressure', stream_ids[0] )
        self.loggerpids.append(pid)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L1 temperature")

        ctd_l1_temperature_output_dp_obj = IonObject(   RT.DataProduct,
                                                        name='L1_Temperature',
                                                        description='transform output L1 temperature',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)

        ctd_l1_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l1_temperature_output_dp_obj,
                                                                                    outgoing_stream_l1_temperature_id,
                                                                                    parameter_dictionary)
        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l1_temperature_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l1_temperature_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_temperature', stream_ids[0] )
        self.loggerpids.append(pid)

        #-------------------------------
        # L2 Salinity - Density: Output Data Products
        #-------------------------------

        outgoing_stream_l2_salinity = L2_practical_salinity_stream_definition()
        outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l2_salinity, name='L2_salinity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id )

        outgoing_stream_l2_density = L2_density_stream_definition()
        outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(container=outgoing_stream_l2_density, name='L2_Density')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id )

        log.debug("test_createTransformsThenActivateInstrument: create output data product L2 Salinity")

        ctd_l2_salinity_output_dp_obj = IonObject(RT.DataProduct,
            name='L2_Salinity',
            description='transform output L2 salinity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l2_salinity_output_dp_id = self.dataproductclient.create_data_product(ctd_l2_salinity_output_dp_obj,
                                                                                outgoing_stream_l2_salinity_id,
                                                                                parameter_dictionary)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l2_salinity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l2_salinity', stream_ids[0] )
        self.loggerpids.append(pid)

        log.debug("test_createTransformsThenActivateInstrument: create output data product L2 Density")

        ctd_l2_density_output_dp_obj = IonObject(   RT.DataProduct,
                                                    name='L2_Density',
                                                    description='transform output L2 density',
                                                    temporal_domain = tdom,
                                                    spatial_domain = sdom)

        ctd_l2_density_output_dp_id = self.dataproductclient.create_data_product(ctd_l2_density_output_dp_obj,
                                                                                outgoing_stream_l2_density_id,
                                                                                parameter_dictionary)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_l2_density_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(ctd_l2_density_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l2_density', stream_ids[0] )
        self.loggerpids.append(pid)


        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create L0 all data_process start")
        try:
            ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
            self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("test_createTransformsThenActivateInstrument: create L0 all data_process return")


        #-------------------------------
        # L1 Conductivity: Create the data process
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create L1 Conductivity data_process start")
        try:
            l1_conductivity_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_conductivity_dprocdef_id, [ctd_l0_conductivity_output_dp_id], {'output':ctd_l1_conductivity_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("test_createTransformsThenActivateInstrument: create L1 Conductivity data_process return")


        #-------------------------------
        # L1 Pressure: Create the data process
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create L1_Pressure data_process start")
        try:
            l1_pressure_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_pressure_dprocdef_id, [ctd_l0_pressure_output_dp_id], {'output':ctd_l1_pressure_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_pressure_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("test_createTransformsThenActivateInstrument: create L1_Pressure data_process return")



        #-------------------------------
        # L1 Temperature: Create the data process
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create L1_Pressure data_process start")
        try:
            l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L1_temperature_dprocdef_id, [ctd_l0_temperature_output_dp_id], {'output':ctd_l1_temperature_output_dp_id})
            self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("test_createTransformsThenActivateInstrument: create L1_Pressure data_process return")



        #-------------------------------
        # L2 Salinity: Create the data process
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create L2_salinity data_process start")
        try:
            l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_salinity_dprocdef_id, [ctd_parsed_data_product], {'output':ctd_l2_salinity_output_dp_id})
            self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("test_createTransformsThenActivateInstrument: create L2_salinity data_process return")

        #-------------------------------
        # L2 Density: Create the data process
        #-------------------------------
        log.debug("test_createTransformsThenActivateInstrument: create L2_Density data_process start")
        try:
            l2_density_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L2_density_dprocdef_id, [ctd_parsed_data_product], {'output':ctd_l2_density_output_dp_id})
            self.dataprocessclient.activate_data_process(l2_density_all_data_process_id)
        except BadRequest as ex:
            self.fail("failed to create new data process: %s" %ex)

        log.debug("test_createTransformsThenActivateInstrument: create L2_Density data_process return")


        #-------------------------------
        # Launch InstrumentAgentInstance, connect to the resource agent client
        #-------------------------------
        self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id)
        print 'test_createTransformsThenActivateInstrument: Instrument agent instance obj: = ', inst_agent_instance_obj

        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = ResourceAgentClient('iaclient', name=inst_agent_instance_obj.agent_process_id,  process=FakeProcess())
        print 'activate_instrument: got ia client %s' % self._ia_client
        log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client))


        #-------------------------------
        # Streaming
        #-------------------------------


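        # Command sequence for streaming: initialize -> go_active -> run, stream
        # for a few seconds via go_streaming, return with go_observatory, reset.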
        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentStream: initialize %s", str(retval))


        log.debug("test_activateInstrumentStream: Sending go_active command (L4-CI-SA-RQ-334)")
        cmd = AgentCommand(command='go_active')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentStream: return value from go_active %s", str(reply))
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_activateInstrumentStream: current state after sending go_active command %s    (L4-CI-SA-RQ-334)", str(state))

        cmd = AgentCommand(command='run')
        reply = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_activateInstrumentStream: return from run state: %s", str(state))

        # Make sure the sampling rate and transmission are sane.
        params = {
            SBE37Parameter.NAVG : 1,
            SBE37Parameter.INTERVAL : 5,
            SBE37Parameter.TXREALTIME : True
        }
        self._ia_client.set_param(params)


        log.debug("test_activateInstrumentStream: calling go_streaming ")
        cmd = AgentCommand(command='go_streaming')
        reply = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_activateInstrumentStream: return from go_streaming state: %s", str(state))


        time.sleep(7)

        log.debug("test_activateInstrumentStream: calling go_observatory")
        cmd = AgentCommand(command='go_observatory')
        reply = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        log.debug("test_activateInstrumentStream: return from go_observatory state  %s", str(state))



        log.debug("test_activateInstrumentStream: calling reset ")
        cmd = AgentCommand(command='reset')
        reply = self._ia_client.execute_agent(cmd)
        log.debug("test_activateInstrumentStream: return from reset state:%s", str(reply.result))
        time.sleep(2)


        #-------------------------------
        # Deactivate InstrumentAgentInstance
        #-------------------------------
        self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)
        for pid in self.loggerpids:
            self.processdispatchclient.cancel_process(pid)

class RecordDictionaryToolTestCase(unittest.TestCase):

    def setUp(self):

        self._tx = TaxyTool()
        self._tx.add_taxonomy_set('temp', 'long_temp_name')
        self._tx.add_taxonomy_set('cond', 'long_cond_name')
        self._tx.add_taxonomy_set('pres', 'long_pres_name')
        self._tx.add_taxonomy_set('rdt')
        self._tx.add_taxonomy_set('rdt2')
        # map is {<local name>: <granule name or path>}

        self._rdt = RecordDictionaryTool(taxonomy=self._tx)

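        # Build a ParameterDictionary covering time/lat/lon plus temp,
        # conductivity, and pres for the param_dictionary-based tests below.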
        self._pdict = ParameterDictionary()

        t_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
        t_ctxt.reference_frame = AxisTypeEnum.TIME
        t_ctxt.uom = 'seconds since 01-01-1970'
        self._pdict.add_context(t_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lat_ctxt.reference_frame = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        self._pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        lon_ctxt.reference_frame = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        self._pdict.add_context(lon_ctxt)

        temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        temp_ctxt.uom = 'degree_Celsius'
        self._pdict.add_context(temp_ctxt)

        cond_ctxt = ParameterContext('conductivity', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        cond_ctxt.uom = 'unknown'
        self._pdict.add_context(cond_ctxt)

        pres_ctxt = ParameterContext('pres', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
        pres_ctxt.uom = 'unknown'
        self._pdict.add_context(pres_ctxt)

        self._rdt_pdict = RecordDictionaryTool(param_dictionary=self._pdict)

    def test_init(self):

        # initialize with a ParameterDictionary
        rdt = RecordDictionaryTool(param_dictionary=self._pdict)
        self.assertIsInstance(rdt._param_dict, ParameterDictionary)

        # initialize with nonsense
        self.assertRaises(TypeError, RecordDictionaryTool, ['foo', 'barr'])

        # initialize with a valid shape
        rdt = RecordDictionaryTool(param_dictionary=self._pdict, shape=(5,2))
        self.assertEquals(rdt._shp, (5,2))

        rdt = RecordDictionaryTool(param_dictionary=self._pdict, shape=(5,))
        self.assertEquals(rdt._shp, (5,))

        rdt = RecordDictionaryTool(param_dictionary=self._pdict, shape=5)
        self.assertEquals(rdt._shp, (5,))

        # initialize with no length
        rdt = RecordDictionaryTool(param_dictionary=self._pdict)
        self.assertEquals(rdt._shp, None)

    def test_init_with_taxonomy(self):

        # initialize with a taxonomy tool
        rdt = RecordDictionaryTool(taxonomy=self._tx)
        self.assertIsInstance(rdt._tx, TaxyTool)

        # initialize with a taxonomy object
        rdt = RecordDictionaryTool(taxonomy=self._tx._t)
        self.assertIsInstance(rdt._tx, TaxyTool)

        # initialize with nonsense
        self.assertRaises(TypeError, RecordDictionaryTool, ['foo', 'barr'])

        # initialize with a valid shape
        rdt = RecordDictionaryTool(taxonomy=self._tx, shape=(5,2))
        self.assertEquals(rdt._shp, (5,2))

        rdt = RecordDictionaryTool(taxonomy=self._tx, shape=(5,))
        self.assertEquals(rdt._shp, (5,))

        rdt = RecordDictionaryTool(taxonomy=self._tx, shape=5)
        self.assertEquals(rdt._shp, (5,))

        # initialize with no length
        rdt = RecordDictionaryTool(taxonomy=self._tx)
        self.assertEquals(rdt._shp, None)

    def test_set_and_get(self):
        #make sure you can set and get items in the granule by name in the parameter dictionary

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)


        self.assertRaises(KeyError, self._rdt.__setitem__, 'long_temp_name', temp_array)
        self.assertRaises(KeyError, self._rdt.__setitem__, 'nonsense', temp_array)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        self.assertTrue(numpy.allclose(self._rdt_pdict['temp'], temp_array))
        self.assertTrue(numpy.allclose(self._rdt_pdict['conductivity'], cond_array))
        self.assertTrue(numpy.allclose(self._rdt_pdict['pres'], pres_array))

        # A KeyError should be raised when a non-nickname key is used (note: this check is known not to work correctly)
        self.assertRaises(KeyError, self._rdt.__getitem__, 'long_temp_name')
        self.assertRaises(KeyError, self._rdt.__getitem__,'nonsense!')

        pdict = self._pdict
        rdt = RecordDictionaryTool(param_dictionary=pdict)
        rdt['temp'] = temp_array

        # Now test bad values... list not numpy array...
        with self.assertRaises(TypeError) as te:
            rdt['temp'] = [1,2,3]

        self.assertEquals(
            te.exception.message,
            '''Invalid type "<type 'list'>" in Record Dictionary Tool setitem with name "temp". Valid types are numpy.ndarray and RecordDictionaryTool'''
            )

        # Now test numpy scalar array...
        with self.assertRaises(TypeError) as te:
            rdt['temp'] = numpy.float32(3.14159)

        self.assertEquals(
            te.exception.message,
            '''Invalid type "<type 'numpy.float32'>" in Record Dictionary Tool setitem with name "temp". Valid types are numpy.ndarray and RecordDictionaryTool'''
        )


        # Now test rank zero array...
        with self.assertRaises(ValueError) as te:
            rdt['temp'] = numpy.array(22.5)

        self.assertEquals(
            te.exception.message,
            '''The rank of a value sequence array in a record dictionary must be greater than zero. Got name "temp" with rank "0"'''
        )

        #TODO: Fix record_dictionary shape validation, then put this test back in
#        # Test set invalid shape
#        pres_array = numpy.random.standard_normal(90)
#        with self.assertRaises(ValueError) as te:
#            rdt['pres'] = pres_array
#
#        self.assertEquals(
#            te.exception.message,
#            '''Invalid array shape "(90,)" for name "pres"; Record dictionary defined shape is "(100,)"'''
#        )


        # make a new RDT for testing higher rank objects...
        rdt = RecordDictionaryTool(param_dictionary=self._pdict)

        # Now test rank 2 array...
        rdt['temp'] = numpy.array([[22.5,],])
        self.assertTrue((rdt['temp'] == numpy.array([[22.5,],])).all())

        # Now test rank 2 array...
        rdt['conductivity'] = numpy.array([[28.5,],])
        self.assertTrue((rdt['conductivity'] == numpy.array([[28.5,],])).all())

    def test_set_and_get_with_taxonomy(self):
        #make sure you can set and get items in the granule by name in the taxonomy

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)


        self.assertRaises(KeyError, self._rdt.__setitem__, 'long_temp_name',temp_array)
        self.assertRaises(KeyError, self._rdt.__setitem__, 'nonsense',temp_array)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array

        self.assertTrue(numpy.allclose(self._rdt['temp'], temp_array))
        self.assertTrue(numpy.allclose(self._rdt['cond'], cond_array))
        self.assertTrue(numpy.allclose(self._rdt['pres'], pres_array))

        # A KeyError should be raised when a non-nickname key is used (note: this check is known not to work correctly)
        self.assertRaises(KeyError, self._rdt.__getitem__, 'long_temp_name')
        self.assertRaises(KeyError, self._rdt.__getitem__,'nonsense!')

        taxy_tool_obj = self._tx
        rdt = RecordDictionaryTool(taxonomy=taxy_tool_obj)
        rdt['temp'] = temp_array
        self._rdt['rdt'] = rdt

        # Now test when the Record Dictionary Tool is created with the Taxonomy object rather than the TaxyTool
        # This can fail if the == method for TaxyTool is implemented incorrectly

        taxonomy_ion_obj = self._tx._t
        rdt2 = RecordDictionaryTool(taxonomy=taxonomy_ion_obj)
        rdt2['temp'] = temp_array
        self._rdt['rdt2'] = rdt2


        # Now test bad values... list not numpy array...
        with self.assertRaises(TypeError) as te:
            rdt2['temp'] = [1,2,3]

        self.assertEquals(
            te.exception.message,
            '''Invalid type "<type 'list'>" in Record Dictionary Tool setitem with name "temp". Valid types are numpy.ndarray and RecordDictionaryTool'''
        )

        # Now test numpy scalar array...
        with self.assertRaises(TypeError) as te:
            rdt2['temp'] = numpy.float32(3.14159)

        self.assertEquals(
            te.exception.message,
            '''Invalid type "<type 'numpy.float32'>" in Record Dictionary Tool setitem with name "temp". Valid types are numpy.ndarray and RecordDictionaryTool'''
        )


        # Now test rank zero array...
        with self.assertRaises(ValueError) as te:
            rdt2['temp'] = numpy.array(22.5)

        self.assertEquals(
            te.exception.message,
            '''The rank of a value sequence array in a record dictionary must be greater than zero. Got name "temp" with rank "0"'''
        )

        #TODO: Fix record_dictionary shape validation, then put this test back in
#        # Test set invalid shape
#        pres_array = numpy.random.standard_normal(90)
#        with self.assertRaises(ValueError) as te:
#            rdt2['pres'] = pres_array
#
#        self.assertEquals(
#            te.exception.message,
#            '''Invalid array shape "(90,)" for name "pres"; Record dictionary defined shape is "(100,)"'''
#        )


        # make a new RDT for testing higher rank objects...
        taxy_tool_obj = self._tx
        rdt = RecordDictionaryTool(taxonomy=taxy_tool_obj)

        # Now test rank 2 array...
        rdt['temp'] = numpy.array([[22.5,],])
        self.assertTrue((rdt['temp'] == numpy.array([[22.5,],])).all())

        # Now test rank 2 array...
        rdt['cond'] = numpy.array([[28.5,],])
        self.assertTrue((rdt['cond'] == numpy.array([[28.5,],])).all())


    def test_iteration(self):
        #Test all four iteration methods for items in the granule

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        for k, v in self._rdt_pdict.iteritems():
            if k == 'temp':
                self.assertTrue(numpy.allclose(temp_array, v))
            elif k == 'conductivity':
                self.assertTrue(numpy.allclose(cond_array, v))
            elif k == 'pres':
                self.assertTrue(numpy.allclose(pres_array, v))
            else:
                self.fail('unexpected key: %s' % k)

        for k in self._rdt_pdict.iterkeys():
            self.assertTrue(k == 'temp' or k == 'conductivity' or k == 'pres')

        for v in self._rdt_pdict.itervalues():
            self.assertTrue(numpy.allclose(temp_array, v) or numpy.allclose(cond_array, v) or numpy.allclose(pres_array, v))

        for k in self._rdt_pdict:
            self.assertTrue(k == 'temp' or k == 'conductivity' or k == 'pres')

    def test_iteration_with_taxonomy(self):
        #Test all four iteration methods for items in the granule

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array

        for k, v in self._rdt.iteritems():
            if k == 'temp':
                self.assertTrue(numpy.allclose(temp_array, v))
            elif k == 'cond':
                self.assertTrue(numpy.allclose(cond_array, v))
            elif k == 'pres':
                self.assertTrue(numpy.allclose(pres_array, v))
            else:
                self.fail('unexpected key: %s' % k)

        for k in self._rdt.iterkeys():
            self.assertTrue(k == 'temp' or k == 'cond' or k == 'pres')

        for v in self._rdt.itervalues():
            self.assertTrue(numpy.allclose(temp_array, v) or numpy.allclose(cond_array, v) or numpy.allclose(pres_array, v))

        for k in self._rdt:
            self.assertTrue(k == 'temp' or k == 'cond' or k == 'pres')

    def test_update(self):
        # Update this granule with the content of another. Assert that the parameter dictionaries are the same...

        pres_array = numpy.random.standard_normal(100)
        self._rdt_pdict['pres'] = pres_array

        rdt2 = RecordDictionaryTool(param_dictionary=self._pdict)
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)

        rdt2['temp'] = temp_array
        rdt2['conductivity'] = cond_array

        self._rdt_pdict.update(rdt2)

        self.assertIn('pres', self._rdt_pdict)
        self.assertIn('temp', self._rdt_pdict)
        self.assertIn('conductivity', self._rdt_pdict)

        self.assertTrue((self._rdt_pdict['pres'] == pres_array).all())
        self.assertTrue((self._rdt_pdict['conductivity'] == cond_array).all())
        self.assertTrue((self._rdt_pdict['temp'] == temp_array).all())

        self.assertEquals(len(self._rdt_pdict), 3)

    def test_update_with_taxonomy(self):
        # Update this granule with the content of another. Assert that the taxonomies are the same...

        pres_array = numpy.random.standard_normal(100)
        self._rdt['pres'] = pres_array

        rdt2 = RecordDictionaryTool(taxonomy=self._tx)
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)

        rdt2['temp'] = temp_array
        rdt2['cond'] = cond_array

        self._rdt.update(rdt2)

        self.assertIn('pres', self._rdt)
        self.assertIn('temp', self._rdt)
        self.assertIn('cond', self._rdt)

        self.assertTrue((self._rdt['pres'] == pres_array).all())
        self.assertTrue((self._rdt['cond'] == cond_array).all())
        self.assertTrue((self._rdt['temp'] == temp_array).all())

        self.assertEquals(len(self._rdt), 3)

    def test_len(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        self.assertEquals(len(self._rdt_pdict), 3)

    def test_len_with_taxonomy(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array

        self.assertEquals(len(self._rdt), 3)

    def test_repr(self):
        # Come up with a reasonable string representation of the granule for debug purposes only

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array
        self.assertTrue(len(repr(self._rdt_pdict)) > 0)

    def test_repr_with_taxonomy(self):
        # Come up with a reasonable string representation of the granule for debug purposes only

        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array
        self.assertTrue(len(repr(self._rdt)) > 0)

    def test_delete(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        self.assertIn('pres', self._rdt_pdict)
        self.assertIn('temp', self._rdt_pdict)
        self.assertIn('conductivity', self._rdt_pdict)

        del self._rdt_pdict['pres']

        self.assertNotIn('pres', self._rdt_pdict)
        self.assertIn('temp', self._rdt_pdict)
        self.assertIn('conductivity', self._rdt_pdict)

    def test_delete_with_taxonomy(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array

        self.assertIn('pres', self._rdt)
        self.assertIn('temp', self._rdt)
        self.assertIn('cond', self._rdt)

        del self._rdt['pres']

        self.assertNotIn('pres', self._rdt)
        self.assertIn('temp', self._rdt)
        self.assertIn('cond', self._rdt)

    def test_contains(self):

        # foobar isn't even in the taxonomy!
        self.assertNotIn('foobar', self._rdt_pdict)

        # Temp is in the ParameterDictionary but not the record dictionary
        self.assertNotIn('temp', self._rdt_pdict)


        # Now put in some data and make sure it works...
        temp_array = numpy.random.standard_normal(100)
        self._rdt_pdict['temp'] = temp_array

        self.assertIn('temp', self._rdt_pdict)

    def test_contains_with_taxonomy(self):

        # foobar isn't even in the taxonomy!
        self.assertNotIn('foobar', self._rdt)

        # Temp is in the taxonomy but not the record dictionary
        self.assertNotIn('temp', self._rdt)


        # Now put in some data and make sure it works...
        temp_array = numpy.random.standard_normal(100)
        self._rdt['temp'] = temp_array

        self.assertIn('temp', self._rdt)

    @unittest.skip('Pretty print not implemented at this time')
    def test_pretty_print(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt_pdict['temp'] = temp_array
        self._rdt_pdict['conductivity'] = cond_array
        self._rdt_pdict['pres'] = pres_array

        self.assertGreater(len(self._rdt_pdict.pretty_print()), 0)

    @unittest.skip('Pretty print not implemented at this time')
    def test_pretty_print_with_taxonomy(self):
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        self._rdt['temp'] = temp_array
        self._rdt['cond'] = cond_array
        self._rdt['pres'] = pres_array


        rdt = RecordDictionaryTool(taxonomy=self._tx)
        rdt['rdt'] = temp_array
        self._rdt['rdt'] = rdt

        self.assertGreater(len(self._rdt.pretty_print()), 0)