Example #1
def test_rewrite_metadata():
    test_metadata = {'dtype': 'float64',
                     'shape': [1024],
                     'others': [],
                     }
    # assemble the metadata args from the default
    metadata_args = MetadataArgs()
    # avoid checksum and codec
    metadata_args.meta_checksum = 'None'
    metadata_args.meta_codec = 'None'
    # preallocate a fixed size
    metadata_args.max_meta_size = 1000  # fixed preallocation
    target_fp = StringIO()
    # write the metadata section
    _write_metadata(target_fp, test_metadata, metadata_args)
    # check that the length is correct
    nt.assert_equal(METADATA_HEADER_LENGTH + metadata_args.max_meta_size,
                    len(target_fp.getvalue()))

    # now add stuff to the metadata
    test_metadata['container'] = 'numpy'
    test_metadata['data_origin'] = 'LHC'
    # compute the new length
    new_metadata_length = len(SERIALIZERS[0].dumps(test_metadata))
    # jam the new metadata into the StringIO
    target_fp.seek(0, 0)
    _rewrite_metadata_fp(target_fp, test_metadata,
                         codec=None, level=None)
    # now seek back, read the metadata and make sure it has been updated
    # correctly
    target_fp.seek(0, 0)
    result_metadata, result_header = _read_metadata(target_fp)
    nt.assert_equal(test_metadata, result_metadata)
    nt.assert_equal(new_metadata_length, result_header.meta_comp_size)

    # make sure that NoChangeInMetadata is raised
    target_fp.seek(0, 0)
    nt.assert_raises(NoChangeInMetadata, _rewrite_metadata_fp,
                     target_fp, test_metadata, codec=None, level=None)

    # make sure that ChecksumLengthMismatch is raised, needs modified metadata
    target_fp.seek(0, 0)
    test_metadata['fluxcompensator'] = 'back to the future'
    nt.assert_raises(ChecksumLengthMismatch, _rewrite_metadata_fp,
                     target_fp, test_metadata,
                     codec=None, level=None, checksum='sha512')

    # make sure if level is not None, this works
    target_fp.seek(0, 0)
    test_metadata['hoverboard'] = 'back to the future 2'
    _rewrite_metadata_fp(target_fp, test_metadata,
                         codec=None)

    # len of metadata when dumped to json should be around 1105
    for i in range(100):
        test_metadata[str(i)] = str(i)
    target_fp.seek(0, 0)
    nt.assert_raises(MetadataSectionTooSmall, _rewrite_metadata_fp,
                     target_fp, test_metadata, codec=None, level=None)
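The test above depends on bloscpack internals (MetadataArgs, _write_metadata, _rewrite_metadata_fp, _read_metadata) that are not shown here. As a rough, self-contained sketch of the pattern being exercised, assuming nothing about bloscpack's actual on-disk layout and using made-up helper names, the idea is to preallocate a fixed-size metadata slot and rewrite it in place, so the section length never changes while the payload still fits:

import io
import json

MAX_META_SIZE = 1000  # fixed preallocation, mirroring max_meta_size above

def write_metadata(fp, metadata):
    # hypothetical helper: serialize, refuse oversized payloads, pad the slot
    payload = json.dumps(metadata).encode('utf-8')
    if len(payload) > MAX_META_SIZE:
        raise ValueError('metadata section too small')
    fp.write(payload + b'\x00' * (MAX_META_SIZE - len(payload)))

def rewrite_metadata(fp, metadata):
    # overwrite the slot in place; the total section length stays constant
    fp.seek(0)
    write_metadata(fp, metadata)

fp = io.BytesIO()
write_metadata(fp, {'dtype': 'float64', 'shape': [1024]})
rewrite_metadata(fp, {'dtype': 'float64', 'shape': [1024], 'container': 'numpy'})
assert len(fp.getvalue()) == MAX_META_SIZE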
Example #2
def test_metadata_opportunisitic_compression():
    # make up some metadata that can be compressed with benefit
    test_metadata = "{'dtype': 'float64', 'shape': [1024], 'others': []," "'original_container': 'carray'}"
    target_fp = StringIO()
    _write_metadata(target_fp, test_metadata, MetadataArgs())
    target_fp.seek(0, 0)
    metadata, header = _read_metadata(target_fp)
    nt.assert_equal("zlib", header["meta_codec"])

    # now do the same thing, but use badly compressible metadata
    test_metadata = "abc"
    target_fp = StringIO()
    # default args say: do compression...
    _write_metadata(target_fp, test_metadata, MetadataArgs())
    target_fp.seek(0, 0)
    metadata, header = _read_metadata(target_fp)
    # but it wasn't of any use
    nt.assert_equal("None", header["meta_codec"])