Example #1
0
    def test_list_datasets(self):
        """Verify the known dataset path appears in the decoder's listing."""
        decoder = HDFDecoder(self.known_hdf_as_string)
        found = decoder.list_datasets()
        self.assertIn(self.path_to_dataset, found)
    def test_list_datasets(self):
        """List the datasets in a known HDF byte string and check membership."""

        # Decode the known, in-memory HDF5 byte string.
        hdfdecoder = HDFDecoder(self.known_hdf_as_string)

        datasets = hdfdecoder.list_datasets()

        # The expected dataset path must be among those the decoder reports.
        self.assertIn(self.path_to_dataset ,datasets)
    def test_decode_known_and_compare(self):
        """
        Create a decoder and read a numpy array from it
        """
        decoder = HDFDecoder(self.known_hdf_as_string)
        decoded = decoder.read_hdf_dataset(self.path_to_dataset)

        # The decoded bytes must hash identically to the known reference array.
        self.assertEqual(sha1(decoded.tostring()), sha1(self.known_array.tostring()))
    def test_decode_known_and_compare(self):
        """
        Create a decoder and read a numpy array from it
        """

        # Decode the known, in-memory HDF5 byte string and read one dataset.
        hdfdecoder = HDFDecoder(self.known_hdf_as_string)
        nparray = hdfdecoder.read_hdf_dataset(self.path_to_dataset)

        # compare the read numpy array to a known value from the stringed input
        self.assertEqual(sha1(nparray.tostring()) ,sha1(self.known_array.tostring()) )
    def test_encode_decode(self):
        """
        Round-trip the known array through the encoder and decoder.
        """
        # Encode: feed the known array in, pull the serialized string out.
        encoder = HDFEncoder()
        encoder.add_hdf_dataset(self.path_to_dataset, self.known_array)
        serialized = encoder.encoder_close()

        # Decode: feed that string in, pull the array back out.
        decoder = HDFDecoder(serialized)
        roundtripped = decoder.read_hdf_dataset(self.path_to_dataset)

        # Digest comparison works for arbitrarily shaped arrays.
        self.assertEqual(sha1(roundtripped.tostring()), sha1(self.known_array.tostring()))
    def test_encode_decode(self):
        """
        Encode some arrays, then decode and compare against the original.
        """

        hdfencoder = HDFEncoder() # put array into the encoder
        hdfencoder.add_hdf_dataset(self.path_to_dataset, self.known_array)
        # get the string out from encoder
        hdf_string = hdfencoder.encoder_close()

        # Compare the arrays
        hdfdecoder = HDFDecoder(hdf_string)  # put string in decoder...
        nparray = hdfdecoder.read_hdf_dataset(self.path_to_dataset) # get array out

        self.assertEqual(sha1(nparray.tostring()), sha1(self.known_array.tostring()) ) # works for arbitrarily shaped arrays
Example #7
0
    def __init__(self, stream_definition=None, stream_granule=None):
        """
        Prepare an HDF decoder over the granule's serialized payload.

        @param stream_definition the stream definition object for this stream
        @param stream_granule is the incoming packet object defining the point record for this stream
        """
        # NOTE(review): original docstring said 'stream_granule_container' but
        # the parameter is 'stream_granule' — presumably the same object.

        self._stream_definition = stream_definition

        self._stream_granule = stream_granule

        # The granule names its data stream by id among its identifiables.
        data_stream_id = stream_granule.data_stream_id
        data_stream = stream_granule.identifiables[data_stream_id]

        # The data stream's 'values' field carries the serialized HDF bytes.
        hdf_string = data_stream.values

        # Decoder used later to read individual datasets out of the payload.
        self._decoder = HDFDecoder(hdf_string)
Example #8
0
class PointSupplementStreamParser(object):
    """
    Parse point-record arrays out of a stream granule's serialized HDF payload.
    """

    def __init__(self, stream_definition=None, stream_granule=None):
        """
        @param stream_definition the stream definition object for this stream
        @param stream_granule is the incoming packet object defining the point record for this stream
        """

        self._stream_definition = stream_definition

        self._stream_granule = stream_granule

        # The granule names its data stream by id; that stream's 'values'
        # field carries the serialized HDF bytes.
        data_stream_id = stream_granule.data_stream_id
        data_stream = stream_granule.identifiables[data_stream_id]

        hdf_string = data_stream.values

        self._decoder = HDFDecoder(hdf_string)


    def get_values(self, field_name=''):
        """
        Return the decoded array for field_name.

        Raises KeyError when the dataset is absent from the HDF payload.
        """

        hdf_path = self._get_hdf_path(field_name)

        try:
            array = self._decoder.read_hdf_dataset(hdf_path)
        except KeyError:
            # Fix: the original 'except KeyError, ke' is Python-2-only syntax;
            # dropping the binding works on Python 2.6+ and 3 alike.
            log.warn('Could not find requested dataset. Datasets present in hdf file: "%s"', self._decoder.list_datasets())
            # Bare 'raise' re-raises with the original traceback intact
            # (original 'raise ke' reset the traceback under Python 2).
            raise

        return array
    def test_decode_encode(self):
        """
        Try a decode-encode sequence and compare if its the same string
        """
        # Decode the known HDF string and pull the array out.
        source = HDFDecoder(self.known_hdf_as_string)
        recovered = source.read_hdf_dataset(self.path_to_dataset)

        # Re-encode that array into a fresh serialized HDF string.
        encoder = HDFEncoder()
        encoder.add_hdf_dataset(self.path_to_dataset, recovered)
        reencoded = encoder.encoder_close()

        # The round-tripped bytes must hash to the known digest.
        self.assertEqual(sha1(reencoded), self.known_hdf_as_sha1)
    def test_decode_encode(self):
        """
        Try a decode-encode sequence and compare if it yields the same string.
        """

        # decode an existing hdf file and read out an array
        hdfdecoder = HDFDecoder(self.known_hdf_as_string) # put known string in decoder...
        nparray = hdfdecoder.read_hdf_dataset(self.path_to_dataset) # get array out

        # encode the array and get the binary string containing the encoded hdf file
        hdfencoder = HDFEncoder() # put the array in the encoder...
        hdfencoder.add_hdf_dataset(self.path_to_dataset, nparray)
        hdf_string = hdfencoder.encoder_close() # get string out

        # compare the two strings via their digests
        self.assertEqual(sha1(hdf_string),self.known_hdf_as_sha1)
Example #11
0
class PointSupplementStreamParser(object):
    """
    Parse point-record arrays out of a stream granule's serialized HDF payload.
    """

    def __init__(self, stream_definition=None, stream_granule=None):
        """
        @param stream_definition the stream definition object for this stream
        @param stream_granule is the incoming packet object defining the point record for this stream
        """

        self._stream_definition = stream_definition

        self._stream_granule = stream_granule

        # The granule names its data stream by id; that stream's 'values'
        # field carries the serialized HDF bytes.
        data_stream_id = stream_granule.data_stream_id
        data_stream = stream_granule.identifiables[data_stream_id]

        hdf_string = data_stream.values

        self._decoder = HDFDecoder(hdf_string)

    def get_values(self, field_name=""):
        """
        Return the decoded array for field_name.

        Raises KeyError when the dataset is absent from the HDF payload.
        """

        hdf_path = self._get_hdf_path(field_name)

        try:
            array = self._decoder.read_hdf_dataset(hdf_path)
        except KeyError:
            # Fix: the original 'except KeyError, ke' is Python-2-only syntax;
            # dropping the binding works on Python 2.6+ and 3 alike.
            log.warn(
                'Could not find requested dataset. Datasets present in hdf file: "%s"', self._decoder.list_datasets()
            )
            # Bare 'raise' re-raises with the original traceback intact
            # (original 'raise ke' reset the traceback under Python 2).
            raise

        return array
    def test_encode_with_filename_and_compare(self):
        """
        Create an encoder and add some (one) dataset/array
        """
        testfilename = 'test_encode_with_filename_and_compare'

        encoder = HDFEncoder(testfilename)
        encoder.add_hdf_dataset(self.path_to_dataset, self.known_array)
        hdf_string = encoder.encoder_close()

        # Encoded bytes must match the known-good digest...
        self.assertEqual(sha1(hdf_string), self.known_hdf_as_sha1)

        # ...and decoding the known string must reproduce the known array.
        decoder = HDFDecoder(self.known_hdf_as_string)
        decoded = decoder.read_hdf_dataset(self.path_to_dataset)
        self.assertEqual(sha1(decoded.tostring()), sha1(self.known_array.tostring()))
Example #13
0
    def test_encode_with_filename_and_compare(self):
        """
        Create an encoder (backed by a named file), add one dataset/array,
        and compare the result against known-good values.
        """
        testfilename = 'test_encode_with_filename_and_compare'

        hdfencoder = HDFEncoder(testfilename)
        hdfencoder.add_hdf_dataset(self.path_to_dataset, self.known_array)
        # get the string out from encoder
        hdf_string = hdfencoder.encoder_close()

        # Encoded bytes must match the known digest.
        self.assertEqual(sha1(hdf_string),self.known_hdf_as_sha1)

        # Decoding the known string must reproduce the known array.
        hdfdecoder = HDFDecoder(self.known_hdf_as_string)
        nparray = hdfdecoder.read_hdf_dataset(self.path_to_dataset)

        self.assertEqual(sha1(nparray.tostring()), sha1(self.known_array.tostring()) )
Example #14
0
    def add_two_datasets_read_compare(self, filename, dataset_name1, dataset_name2):
        """Write two arrays into one HDF file, read both back, compare digests."""
        first = numpy.ones((4,5))
        second = numpy.ones((2,3))

        # Encode both datasets into a single serialized file.
        encoder = HDFEncoder(filename)
        encoder.add_hdf_dataset(dataset_name1, first)
        encoder.add_hdf_dataset(dataset_name2, second)
        serialized = encoder.encoder_close()

        decoder = HDFDecoder(serialized)
        # Read each dataset back and verify it round-tripped intact.
        decoded_first = decoder.read_hdf_dataset(dataset_name1)
        decoded_second = decoder.read_hdf_dataset(dataset_name2)

        self.assertEqual(sha1(first.tostring()), sha1(decoded_first.tostring()))
        self.assertEqual(sha1(second.tostring()), sha1(decoded_second.tostring()))
    def add_two_datasets_read_compare(self, filename, dataset_name1, dataset_name2):
        """Write two arrays into one HDF file, read both back, compare digests."""
        array1 = numpy.ones((4,5))
        array2 = numpy.ones((2,3))

        # first create the file
        hdfencoder = HDFEncoder(filename)
        hdfencoder.add_hdf_dataset(dataset_name1, array1)

        hdfencoder.add_hdf_dataset(dataset_name2, array2)
        hdfstring = hdfencoder.encoder_close()

        hdfdecoder = HDFDecoder(hdfstring)
        # Read the first dataset
        array_decoded_1 =  hdfdecoder.read_hdf_dataset(dataset_name1)

        # Read the second dataset
        array_decoded_2 = hdfdecoder.read_hdf_dataset(dataset_name2)

        # Both arrays must round-trip byte-for-byte.
        self.assertEqual(sha1(array1.tostring()), sha1(array_decoded_1.tostring()) )
        self.assertEqual(sha1(array2.tostring()), sha1(array_decoded_2.tostring()) )
Example #16
0
class PointSupplementStreamParser(object):
    """
    Pull point-record arrays out of a stream granule's HDF payload, using the
    stream definition to map field names onto HDF dataset paths.
    """

    def __init__(self, stream_definition=None, stream_granule=None):
        """
        @param stream_granule_container is the incoming packet object defining the point record for this stream
        """
        self._stream_definition = stream_definition
        self._stream_granule = stream_granule

        # Locate the data stream inside the granule and hand its serialized
        # HDF payload to a decoder for later reads.
        granule_stream = stream_granule.identifiables[stream_granule.data_stream_id]
        self._decoder = HDFDecoder(granule_stream.values)

    def get_values(self, field_name=''):
        """Return the decoded array stored for the named field."""
        return self._decoder.read_hdf_dataset(self._get_hdf_path(field_name))

    def _get_hdf_path(self, field_name):
        """Map a field name to its HDF values path via the stream definition."""
        identifiables = self._stream_definition.identifiables
        # Let the exception buble if this doesn't work...
        #@todo check to make sure this range id is in the stream granule?
        range_id = identifiables[field_name].range_id
        return identifiables[range_id].values_path

    def list_field_names(self):
        """
        Debug method to list the field names in the stream definition

        Currently does not check to see if the range for the field is in this supplement!
        """
        definition = self._stream_definition
        identifiables = definition.identifiables

        # Walk data stream -> element type -> data record to reach field ids.
        element_type_id = identifiables[definition.data_stream_id].element_type_id
        data_record_id = identifiables[element_type_id].data_record_id
        return identifiables[data_record_id].field_ids
Example #17
0
    def __init__(self, stream_definition=None, stream_granule=None):
        """
        @param stream_granule_container is the incoming packet object defining the point record for this stream
        """
        self._stream_definition = stream_definition
        self._stream_granule = stream_granule

        # Find the granule's data stream by id and decode its serialized
        # HDF payload for later dataset reads.
        granule_stream = stream_granule.identifiables[stream_granule.data_stream_id]
        self._decoder = HDFDecoder(granule_stream.values)
Example #18
0
# NOTE(review): Python 2 script fragment ('print' statements below are
# py2-only). 'hdfencoder', 'hdfstring1', 'dname', the arrays and the
# dataset names are defined earlier in the file, outside this excerpt.
hdfencoder.add_hdf_dataset(dataset_name1, array1)
hdfencoder.add_hdf_dataset(dataset_name2, array2)
# Convert all the data to a binary string for easy transportation
hdfstring2 = hdfencoder.encoder_close()

# Create another encoder. This time pass on name of hdf5 file to write
hdfencoder = HDFEncoder('/tmp/testHDFEncoder.hdf5')
hdfencoder.add_hdf_dataset(dname, array3)
# Convert all the data to a binary string for easy transportation
hdfstring3 = hdfencoder.encoder_close()

##########################################################

print('Dataset: %s ' % dataset_name2)
# Create a decoder object
hdfdecoder = HDFDecoder(hdfstring1)
# Read the array out of the decoder
print hdfdecoder.read_hdf_dataset(dataset_name2)

print('Dataset: %s ' % dataset_name1)
# Create a decoder object
hdfdecoder = HDFDecoder(hdfstring2)
# Read the array out of the decoder
print hdfdecoder.read_hdf_dataset(dataset_name1)

#print "Third decoded hdf_string: "
## Create a decoder object
hdfdecoder = HDFDecoder(hdfstring3)
# Read the array out of the decoder
print('Dataset: %s ' % dataset_name1)
print hdfdecoder.read_hdf_dataset(dataset_name1)
Example #19
0
# NOTE(review): Python 2 script fragment ('print' statements below are
# py2-only). 'hdfencoder', 'hdfstring1', 'dname', the arrays and the
# dataset names are defined earlier in the file, outside this excerpt.
hdfencoder.add_hdf_dataset(dataset_name1, array1)
hdfencoder.add_hdf_dataset(dataset_name2, array2)
# Convert all the data to a binary string for easy transportation
hdfstring2 = hdfencoder.encoder_close()

# Create another encoder. This time pass on name of hdf5 file to write
hdfencoder = HDFEncoder('/tmp/testHDFEncoder.hdf5')
hdfencoder.add_hdf_dataset(dname, array3)
# Convert all the data to a binary string for easy transportation
hdfstring3 = hdfencoder.encoder_close()

##########################################################

print('Dataset: %s ' % dataset_name2)
# Create a decoder object
hdfdecoder = HDFDecoder(hdfstring1)
# Read the array out of the decoder
print hdfdecoder.read_hdf_dataset(dataset_name2)

print('Dataset: %s ' % dataset_name1)
# Create a decoder object
hdfdecoder = HDFDecoder(hdfstring2)
# Read the array out of the decoder
print hdfdecoder.read_hdf_dataset(dataset_name1)

#print "Third decoded hdf_string: "
## Create a decoder object
hdfdecoder = HDFDecoder(hdfstring3)
# Read the array out of the decoder
print('Dataset: %s ' % dataset_name1)
print hdfdecoder.read_hdf_dataset(dataset_name1)