    def _validate_google_dt_results(self, results_stream_def, results):

        cc = self.container
        assertions = self.assertTrue

        for g in results:

            if isinstance(g, Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)
                #log.warn(tx.pretty_print())
                #log.warn(rdt.pretty_print())

                gdt_component = rdt['google_dt_components'][0]

                assertions(gdt_component['viz_product_type'] == 'google_realtime_dt')
                gdt_description = gdt_component['data_table_description']
                gdt_content = gdt_component['data_table_content']

                assertions(gdt_description[0][0] == 'time')
                assertions(len(gdt_description) > 1)
                assertions(len(gdt_content) >= 0)

        return
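# A sketch of the structure the helper above expects in
# rdt['google_dt_components'][0]. The keys are taken from the assertions; the
# concrete values are illustrative placeholders, not real product output.
example_gdt_component = {
    'viz_product_type': 'google_realtime_dt',
    'data_table_description': [
        ('time', 'datetime', 'time'),   # first column must be time
        ('temp', 'number', 'temp'),     # at least one more data column
    ],
    'data_table_content': [
        [1353359218.0, 10.5],           # one row per sample; may be empty
    ],
}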
    def test_build_granule_and_load_from_granule(self):


        #Define a taxonomy and add sets. add_taxonomy_set takes one or more names and assigns them to one handle
        tx = TaxyTool()
        tx.add_taxonomy_set('temp', 'long_temp_name')
        tx.add_taxonomy_set('cond', 'long_cond_name')
        tx.add_taxonomy_set('pres', 'long_pres_name')
        tx.add_taxonomy_set('rdt')
        # map is {<local name>: <granule name or path>}

        #Use RecordDictionaryTool to create a record dictionary. Send in the taxonomy so the Tool knows what to expect
        rdt = RecordDictionaryTool(taxonomy=tx)

        #Create some arrays and fill them with random values
        temp_array = numpy.random.standard_normal(100)
        cond_array = numpy.random.standard_normal(100)
        pres_array = numpy.random.standard_normal(100)

        #Use the RecordDictionaryTool to add the values. This also would work if you used long_temp_name, etc.
        rdt['temp'] = temp_array
        rdt['cond'] = cond_array
        rdt['pres'] = pres_array

        #You can also add in another RecordDictionaryTool, providing the taxonomies are the same.
        rdt2 = RecordDictionaryTool(taxonomy=tx)
        rdt2['temp'] = temp_array
        rdt['rdt'] = rdt2


        g = build_granule(data_producer_id='john', taxonomy=tx, record_dictionary=rdt)

        l_tx = TaxyTool.load_from_granule(g)

        l_rd = RecordDictionaryTool.load_from_granule(g)

        # Make sure we got back the same Taxonomy Object
        self.assertEquals(l_tx._t, tx._t)
        self.assertEquals(l_tx.get_handles('temp'), tx.get_handles('temp'))
        self.assertEquals(l_tx.get_handles('testing_2'), tx.get_handles('testing_2'))


        # Now test the record dictionary object
        self.assertEquals(l_rd._rd, rdt._rd)
        self.assertEquals(l_rd._tx._t, rdt._tx._t)


        for k, v in l_rd.iteritems():
            self.assertIn(k, rdt)

            if isinstance(v, numpy.ndarray):
                self.assertTrue((v == rdt[k]).all())

            else:
                self.assertEquals(v._rd, rdt[k]._rd)
def combine_granules(granule_a, granule_b):
    """
    Naively combine two granules that share the same taxonomy by appending
    their value sequences key by key.
    """
    validate_is_instance(granule_a, Granule, "granule_a is not a proper Granule")
    validate_is_instance(granule_b, Granule, "granule_b is not a proper Granule")

    tt_a = TaxyTool.load_from_granule(granule_a)
    tt_b = TaxyTool.load_from_granule(granule_b)

    if tt_a != tt_b:
        raise BadRequest("Can't combine the two granules, they do not have the same taxonomy.")

    rdt_new = RecordDictionaryTool(tt_a)
    rdt_a = RecordDictionaryTool.load_from_granule(granule_a)
    rdt_b = RecordDictionaryTool.load_from_granule(granule_b)

    for k in rdt_a.iterkeys():
        rdt_new[k] = np.append(rdt_a[k], rdt_b[k])
    return build_granule(granule_a.data_producer_id, tt_a, rdt_new)
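# A minimal usage sketch for combine_granules, reusing only calls that appear
# in the snippets above (TaxyTool, RecordDictionaryTool, build_granule); their
# import paths are not shown in this excerpt, so they are assumed to be in scope.
import numpy as np

tx = TaxyTool()
tx.add_taxonomy_set('temp', 'long_temp_name')

rdt_a = RecordDictionaryTool(taxonomy=tx)
rdt_a['temp'] = np.random.standard_normal(10)

rdt_b = RecordDictionaryTool(taxonomy=tx)
rdt_b['temp'] = np.random.standard_normal(10)

granule_a = build_granule(data_producer_id='dp_id', taxonomy=tx, record_dictionary=rdt_a)
granule_b = build_granule(data_producer_id='dp_id', taxonomy=tx, record_dictionary=rdt_b)

# Both granules share the same taxonomy, so the combined record dictionary
# holds each value sequence from granule_a followed by the one from granule_b.
combined = combine_granules(granule_a, granule_b)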
    def process(self, packet):
        if not isinstance(packet, Granule):
            log.warn('Invalid packet received: Type "%s"' % type(packet))
            return

        rd_in = RecordDictionaryTool.load_from_granule(packet)
        tt = TaxyTool.load_from_granule(packet)

        rd_out = RecordDictionaryTool(tt)
        for nickname, v_sequence in rd_in.iteritems():
            rd_out[nickname] = self.shift(v_sequence)

        g_out = build_granule(data_producer_id='dp_id', taxonomy=tt, record_dictionary=rd_out)
        self.publish(g_out)
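# The shift() helper used above is not part of this excerpt. Purely as a
# hypothetical illustration of the transform pattern (load the incoming
# granule, apply a function to each value sequence, republish), shift might
# roll every sequence by one sample:
import numpy as np

def shift(v_sequence):
    # Hypothetical placeholder only; the real implementation is not shown here.
    return np.roll(np.asarray(v_sequence), 1)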
    def _validate_mpl_graphs_results(self, results_stream_def, results):

        cc = self.container
        assertions = self.assertTrue


        for g in results:
            if isinstance(g, Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)
                #log.warn(tx.pretty_print())
                #log.warn(rdt.pretty_print())

                graphs = rdt['matplotlib_graphs']

                for graph in graphs:
                    assertions(graph['viz_product_type'] == 'matplotlib_graphs')
                    # check that the list (numpy array) contains actual PNG images
                    assertions(imghdr.what(graph['image_name'], graph['image_obj']) == 'png')


        return
    def _process_visualization_message(self, messages):

        gdt_description = None
        gdt_content = []
        viz_product_type = ''

        for message in messages:

            message_data = message.body

            if isinstance(message_data, Granule):

                tx = TaxyTool.load_from_granule(message_data)
                rdt = RecordDictionaryTool.load_from_granule(message_data)

                gdt_components = get_safe(rdt, 'google_dt_components')


                # If this granule does not contain a google_dt component, skip it
                if gdt_components is None:
                    continue

                gdt_component = gdt_components[0]
                viz_product_type = gdt_component['viz_product_type']

                # Process Google DataTable messages
                if viz_product_type == 'google_dt':

                    # If the data description is being put together for the first time,
                    # switch the time format from float to datetime
                    if gdt_description is None:
                        temp_gdt_description = gdt_component['data_description']
                        gdt_description = [('time', 'datetime', 'time')]

                        for idx in range(1, len(temp_gdt_description)):
                            # force-convert each remaining column entry to a tuple
                            temp_arr = temp_gdt_description[idx]
                            if temp_arr is not None:
                                gdt_description.append((temp_arr[0], temp_arr[1], temp_arr[2]))

                    # append all content to one big array
                    temp_gdt_content = gdt_component['data_content']
                    for tempTuple in temp_gdt_content:
                        # sometimes there are inexplicable empty tuples in the content. Drop them
                        if not tempTuple:
                            continue

                        varTuple = [datetime.fromtimestamp(tempTuple[0])]
                        varTuple.extend(tempTuple[1:])
                        gdt_content.append(varTuple)


                #TODO - what to do if this is not a valid visualization message?


        # Now that all the messages have been parsed, any last processing should be done here
        if viz_product_type == "google_dt":

            # Using the description and content, build the google data table
            gdt = gviz_api.DataTable(gdt_description)
            gdt.LoadData(gdt_content)

            return gdt.ToJSonResponse()

        return None
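# For reference, the gviz_api calls used above in isolation. The column names
# and sample rows below are placeholders; only the call pattern
# (DataTable description -> LoadData -> ToJSonResponse) mirrors the method.
from datetime import datetime
import gviz_api

description = [('time', 'datetime', 'time'), ('temp', 'number', 'temp')]
content = [[datetime(2012, 1, 1, 0, 0, 0), 10.5],
           [datetime(2012, 1, 1, 0, 0, 1), 10.7]]

gdt = gviz_api.DataTable(description)
gdt.LoadData(content)
json_response = gdt.ToJSonResponse()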