def test_version_mismatch_file():
    """Check the tag-version-mismatch warning when opening an ASDF-in-FITS file.

    The test fixture contains a ``core/complex`` tagged with version 7.0.0,
    which no installed extension supports; opening with
    ``ignore_version_mismatch=False`` must emit exactly one warning, and the
    default (ignoring mismatches) must emit none.

    NOTE(review): ``pytest.warns(None)`` was deprecated and removed in
    pytest 7 — confirm the pinned pytest version for this suite.
    """

    testfile = str(get_test_data_path('version_mismatch.fits'))

    with pytest.warns(None) as w:
        with asdf.open(testfile,
                ignore_version_mismatch=False) as fits_handle:
            assert fits_handle.tree['a'] == complex(0j)
    # This is the warning that we expect from opening the FITS file
    assert len(w) == 1, display_warnings(w)
    assert str(w[0].message) == (
        "'tag:stsci.edu:asdf/core/complex' with version 7.0.0 found in file "
        "'{}', but latest supported version is 1.0.0".format(testfile))

    # Make sure warning does not occur when warning is ignored (default)
    with pytest.warns(None) as w:
        with asdf.open(testfile) as fits_handle:
            assert fits_handle.tree['a'] == complex(0j)
    assert len(w) == 0, display_warnings(w)

    # Same two checks via the AsdfInFits entry point.
    with pytest.warns(None) as w:
        with fits_embed.AsdfInFits.open(testfile,
                ignore_version_mismatch=False) as fits_handle:
            assert fits_handle.tree['a'] == complex(0j)
    assert len(w) == 1
    assert str(w[0].message) == (
        "'tag:stsci.edu:asdf/core/complex' with version 7.0.0 found in file "
        "'{}', but latest supported version is 1.0.0".format(testfile))

    # Make sure warning does not occur when warning is ignored (default)
    with pytest.warns(None) as w:
        with fits_embed.AsdfInFits.open(testfile) as fits_handle:
            assert fits_handle.tree['a'] == complex(0j)
    assert len(w) == 0, display_warnings(w)
def test_no_yaml_end_marker(tmpdir):
    """An ASDF file whose YAML section lacks the '...' end marker is rejected."""
    content = b"""#ASDF 1.0.0
%YAML 1.1
%TAG ! tag:stsci.edu:asdf/
--- !core/asdf-1.0.0
foo: bar...baz
baz: 42
    """
    path = os.path.join(str(tmpdir), 'test.asdf')

    # Opening from an in-memory buffer must fail.
    buff = io.BytesIO(content)
    with pytest.raises(ValueError):
        with asdf.open(buff):
            pass

    # The same bytes wrapped in a generic InputStream must also fail.
    buff.seek(0)
    stream = generic_io.InputStream(buff, 'r')
    with pytest.raises(ValueError):
        with asdf.open(stream):
            pass

    # And so must an actual file on disk.
    with open(path, 'wb') as handle:
        handle.write(content)
    with open(path, 'rb') as handle:
        with pytest.raises(ValueError):
            with asdf.open(handle):
                pass
# Exemple #3
def test_extension_version_warning():
    """Warn when a file was written by a newer extension version than installed.

    The file's history records asdf 100.0.3 for the builtin extension, which
    must trigger exactly one warning unless ``ignore_missing_extensions`` is
    set.

    NOTE(review): ``pytest.warns(None)`` was removed in pytest 7 — confirm
    the pinned pytest version.
    """

    yaml = """
history:
  extensions:
    - !core/extension_metadata-1.0.0
      extension_class: asdf.extension.BuiltinExtension
      software: !core/software-1.0.0
        name: asdf
        version: 100.0.3
    """

    buff = yaml_to_asdf(yaml)
    with pytest.warns(None) as warnings:
        with asdf.open(buff) as af:
            pass

    # Exactly one warning about the newer recorded extension version.
    assert len(warnings) == 1, display_warnings(warnings)
    assert str(warnings[0].message).startswith(
        "File was created with extension 'asdf.extension.BuiltinExtension' "
        "from package asdf-100.0.3")

    buff.seek(0)

    # Make sure suppressing the warning works too
    with pytest.warns(None) as warnings:
        with asdf.open(buff, ignore_missing_extensions=True) as af:
            pass

    assert len(warnings) == 0, display_warnings(warnings)
# Exemple #4
def test_array_to_stream(tmpdir):
    """Round-trip an array stored as a streamed block, in memory and on disk.

    Rows appended to the file after the streamed block become part of the
    stream on read, and a streamed array serializes with an unknown leading
    dimension (``shape: ['*']``).
    """
    tree = {
        'stream': np.array([1, 2, 3, 4], np.int64),
    }

    buff = io.BytesIO()
    ff = asdf.AsdfFile(tree)
    ff.set_array_storage(tree['stream'], 'streamed')
    ff.write_to(buff)
    # FIX: tobytes() replaces ndarray.tostring(), which was deprecated and
    # removed in numpy 1.23; the produced bytes are identical.
    buff.write(np.array([5, 6, 7, 8], np.int64).tobytes())

    buff.seek(0)
    ff = asdf.open(generic_io.InputStream(buff))
    assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8])
    buff.seek(0)
    ff2 = asdf.AsdfFile(ff)
    ff2.write_to(buff)
    # Streamed arrays are written with an unknown leading dimension.
    assert b"shape: ['*']" in buff.getvalue()

    # Same round trip via a real file on disk.
    with open(os.path.join(str(tmpdir), 'test.asdf'), 'wb') as fd:
        ff = asdf.AsdfFile(tree)
        ff.set_array_storage(tree['stream'], 'streamed')
        ff.write_to(fd)
        fd.write(np.array([5, 6, 7, 8], np.int64).tobytes())

    with asdf.open(os.path.join(str(tmpdir), 'test.asdf')) as ff:
        assert_array_equal(ff.tree['stream'], [1, 2, 3, 4, 5, 6, 7, 8])
        ff2 = asdf.AsdfFile(ff)
        ff2.write_to(buff)
        assert b"shape: ['*']" in buff.getvalue()
# Exemple #5
def test_custom_validation_with_definitions_bad(tmpdir):
    """A tree violating a custom schema fails validation on create and on open."""
    schema_path = helpers.get_test_data_path('custom_schema_definitions.yaml')
    outfile = os.path.join(str(tmpdir), 'out.asdf')

    # This tree does NOT conform to the custom schema
    tree = {
        'forb': { 'biz': 'hello', 'baz': 'world' }
    }

    # Without the custom schema, writing succeeds.
    with asdf.AsdfFile(tree) as af:
        af.write_to(outfile)

    # With the custom schema, construction is rejected.
    with pytest.raises(ValidationError):
        with asdf.AsdfFile(tree, custom_schema=schema_path):
            pass

    # Without the custom schema, reading succeeds.
    with asdf.open(outfile):
        pass

    # With the custom schema, reading is rejected.
    with pytest.raises(ValidationError):
        with asdf.open(outfile, custom_schema=schema_path):
            pass
def test_no_asdf_blocks(tmpdir):
    """Trailing junk after the YAML end marker is not treated as a binary block."""
    content = b"""#ASDF 1.0.0
%YAML 1.1
%TAG ! tag:stsci.edu:asdf/
--- !core/asdf-1.0.0
foo: bar
...
XXXXXXXX
    """

    path = os.path.join(str(tmpdir), 'test.asdf')

    # In-memory buffer.
    buff = io.BytesIO(content)
    with asdf.open(buff) as af:
        assert len(af.blocks) == 0

    # Generic InputStream over the same bytes.
    buff.seek(0)
    stream = generic_io.InputStream(buff, 'r')
    with asdf.open(stream) as af:
        assert len(af.blocks) == 0

    # Real file on disk.
    with open(path, 'wb') as handle:
        handle.write(content)
    with open(path, 'rb') as handle:
        with asdf.open(handle) as af:
            assert len(af.blocks) == 0
# Exemple #7
def test_fill_and_remove_defaults():
    """Schema defaults are filled on open and can be stripped again.

    Uses a custom type whose schema (presumably custom/default-1.0.0 in the
    test data — confirm) declares defaults ``a: 42`` and ``b.c: 82``.
    """
    class DefaultType(dict, types.CustomType):
        name = 'default'
        organization = 'nowhere.org'
        version = (1, 0, 0)
        standard = 'custom'

    class DefaultTypeExtension(CustomExtension):
        @property
        def types(self):
            return [DefaultType]

    yaml = """
custom: !<tag:nowhere.org:custom/default-1.0.0>
  b: {}
    """
    buff = helpers.yaml_to_asdf(yaml)
    # By default, opening fills in schema defaults.
    with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff:
        assert 'a' in ff.tree['custom']
        assert ff.tree['custom']['a'] == 42
        assert ff.tree['custom']['b']['c'] == 82

    buff.seek(0)
    # With do_not_fill_defaults the tree stays sparse until fill_defaults()
    # is called explicitly; remove_defaults() strips them back out.
    with asdf.open(buff, extensions=[DefaultTypeExtension()],
                            do_not_fill_defaults=True) as ff:
        assert 'a' not in ff.tree['custom']
        assert 'c' not in ff.tree['custom']['b']
        ff.fill_defaults()
        assert 'a' in ff.tree['custom']
        assert ff.tree['custom']['a'] == 42
        assert 'c' in ff.tree['custom']['b']
        assert ff.tree['custom']['b']['c'] == 82
        ff.remove_defaults()
        assert 'a' not in ff.tree['custom']
        assert 'c' not in ff.tree['custom']['b']
# Exemple #8
def test_readonly(tmpdir):
    """Arrays from a file opened read-only (memory-mapped) must not be writeable."""

    tmpfile = str(tmpdir.join('data.asdf'))
    tree = dict(data=np.ndarray((100)))

    with asdf.AsdfFile(tree) as af:
        # Make sure we're actually writing to an internal array for this test
        af.write_to(tmpfile, all_array_storage='internal')

    # Opening in read mode (the default) should mean array is readonly
    with asdf.open(tmpfile) as af:
        assert af['data'].flags.writeable == False
        with pytest.raises(ValueError) as err:
            af['data'][0] = 41
        # BUG FIX: this assertion previously sat inside the pytest.raises
        # block *after* the raising statement, so it never executed.  It is
        # now checked against the captured exception value.
        assert 'read-only' in str(err.value)

    # This should be perfectly fine
    with asdf.open(tmpfile, mode='rw') as af:
        assert af['data'].flags.writeable == True
        af['data'][0] = 40

    # Copying the arrays makes it safe to write to the underlying array
    with asdf.open(tmpfile, mode='r', copy_arrays=True) as af:
        assert af['data'].flags.writeable == True
        af['data'][0] = 42
def test_no_final_newline(tmpdir):
    """A YAML section ending in '...' with no trailing newline still parses."""
    content = b"""#ASDF 1.0.0
%YAML 1.1
%TAG ! tag:stsci.edu:asdf/
--- !core/asdf-1.0.0
foo: ...bar...
baz: 42
..."""
    path = os.path.join(str(tmpdir), 'test.asdf')

    # From an in-memory buffer.
    buff = io.BytesIO(content)
    with asdf.open(buff) as af:
        assert len(af.tree) == 2

    # From a generic InputStream over the same bytes.
    buff.seek(0)
    stream = generic_io.InputStream(buff, 'r')
    with asdf.open(stream) as af:
        assert len(af.tree) == 2

    # From an actual file on disk.
    with open(path, 'wb') as handle:
        handle.write(content)
    with open(path, 'rb') as handle:
        with asdf.open(handle) as af:
            assert len(af.tree) == 2
# Exemple #10
def diff(filenames, minimal, iostream=sys.stdout):
    """Top-level implementation of diff algorithm.

    Opens both files with raw (untranslated) types and compares their trees,
    writing the report to *iostream*.  A ValueError from opening either file
    is re-raised as RuntimeError for the command-line layer.
    """
    try:
        with asdf.open(filenames[0], _force_raw_types=True) as asdf0:
            with asdf.open(filenames[1], _force_raw_types=True) as asdf1:
                diff_ctx = DiffContext(asdf0, asdf1, iostream, minimal=minimal)
                compare_trees(diff_ctx, asdf0.tree, asdf1.tree)
    except ValueError as error:
        # FIX: chain the original exception so its traceback is preserved.
        raise RuntimeError(str(error)) from error
# Exemple #11
def test_supported_versions():
    """A type declaring supported_versions can read both old and new tag versions."""
    class CustomFlow:
        def __init__(self, c=None, d=None):
            self.c = c
            self.d = d

    class CustomFlowType(types.CustomType):
        version = '1.1.0'
        supported_versions = [(1,0,0), versioning.AsdfSpec('>=1.1.0')]
        name = 'custom_flow'
        organization = 'nowhere.org'
        standard = 'custom'
        types = [CustomFlow]

        @classmethod
        def from_tree(cls, tree, ctx):
            # Convert old schema to new CustomFlow type
            if cls.version == '1.0.0':
                return CustomFlow(c=tree['a'], d=tree['b'])
            # BUG FIX: removed an unreachable trailing
            # ``return CustomFlow(**kwargs)`` that referenced an undefined
            # name.
            return CustomFlow(**tree)

        @classmethod
        def to_tree(cls, data, ctx):
            if cls.version == '1.0.0':
                tree = dict(a=data.c, b=data.d)
            else:
                tree = dict(c=data.c, d=data.d)
            # BUG FIX: the converted tree was built but never returned.
            return tree

    class CustomFlowExtension(CustomExtension):
        @property
        def types(self):
            return [CustomFlowType]

    new_yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.1.0>
    c: 100
    d: 3.14
"""
    old_yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.0.0>
    a: 100
    b: 3.14
"""
    # FIX: use context managers so the opened files are closed.
    new_buff = helpers.yaml_to_asdf(new_yaml)
    with asdf.open(new_buff, extensions=CustomFlowExtension()) as new_data:
        assert type(new_data.tree['flow_thing']) == CustomFlow

    old_buff = helpers.yaml_to_asdf(old_yaml)
    with asdf.open(old_buff, extensions=CustomFlowExtension()) as old_data:
        assert type(old_data.tree['flow_thing']) == CustomFlow
def test_recompression(tmpdir):
    """An already-compressed file can be rewritten with a different codec."""
    tree = _get_large_tree()

    # Write the tree with zlib compression.
    first = os.path.join(str(tmpdir), 'test1.asdf')
    afile = asdf.AsdfFile(tree)
    afile.write_to(first, all_array_compression='zlib')
    afile.close()

    # Re-open and re-write with bzp2 compression.
    second = os.path.join(str(tmpdir), 'test2.asdf')
    afile = asdf.open(first)
    afile.write_to(second, all_array_compression='bzp2')
    afile.close()

    # The recompressed file must still contain the original tree.
    afile = asdf.open(second)
    helpers.assert_tree_match(tree, afile.tree)
    afile.close()
def test_open_gzipped():
    """A gzipped ASDF-in-FITS file opens via HDU list, AsdfInFits, and asdf.open."""
    testfile = get_test_data_path('asdf.fits.gz')

    # Route 1: open the FITS HDU list first, then hand it to asdf.
    with fits.open(testfile) as hdus:
        with asdf.open(hdus) as af:
            assert af.tree['stuff'].shape == (20, 20)

    # Route 2: the AsdfInFits entry point.
    with fits_embed.AsdfInFits.open(testfile) as af:
        assert af.tree['stuff'].shape == (20, 20)

    # Route 3: plain asdf.open on the path.
    with asdf.open(testfile) as af:
        assert af.tree['stuff'].shape == (20, 20)
def test_input(tmpdir):
    """Array compression from the input file survives a plain re-write."""
    tree = _get_large_tree()

    # Write with zlib compression.
    first = os.path.join(str(tmpdir), 'test1.asdf')
    handle = asdf.AsdfFile(tree)
    handle.write_to(first, all_array_compression='zlib')
    handle.close()

    # Re-write with no compression argument at all.
    second = os.path.join(str(tmpdir), 'test2.asdf')
    handle = asdf.open(first)
    handle.write_to(second)
    handle.close()

    handle = asdf.open(second)
    helpers.assert_tree_match(tree, handle.tree)
    # The original zlib setting is carried over to the rewritten file.
    assert handle.get_array_compression(handle.tree['science_data']) == 'zlib'
    handle.close()
def test_empty_file():
    """Files containing only header line(s) parse to an empty tree, no blocks."""
    headers = (b"#ASDF 1.0.0\n",
               b"#ASDF 1.0.0\n#ASDF_STANDARD 1.0.0")
    for content in headers:
        buff = io.BytesIO(content)
        buff.seek(0)
        with asdf.open(buff) as ff:
            assert ff.tree == {}
            assert len(ff.blocks) == 0
def test_not_asdf_file():
    """Non-ASDF content, with or without a trailing newline, raises ValueError."""
    for content in (b"SIMPLE", b"SIMPLE\n"):
        buff = io.BytesIO(content)
        buff.seek(0)
        with pytest.raises(ValueError):
            with asdf.open(buff):
                pass
def test_no_asdf_header(tmpdir):
    """Content without the '#ASDF' magic line is rejected as not an ASDF file."""
    content = b"What? This ain't no ASDF file"
    path = os.path.join(str(tmpdir), 'test.asdf')

    # From an in-memory buffer.
    with pytest.raises(ValueError):
        asdf.open(io.BytesIO(content))

    # From a real file on disk.
    with open(path, 'wb') as handle:
        handle.write(content)
    with open(path, 'rb') as handle:
        with pytest.raises(ValueError):
            asdf.open(handle)
# Exemple #18
def test_version_mismatch():
    """Check tag-version-mismatch warnings for an in-memory ASDF buffer.

    A ``core/complex`` tagged 42.0.0 is unsupported: with
    ``ignore_version_mismatch=False`` each open emits exactly one warning
    (repeatable), the default suppresses it, and a patch-level-only
    difference (1.0.1 vs 1.0.0) warns not at all.

    NOTE(review): ``pytest.warns(None)`` was removed in pytest 7 — confirm
    the pinned pytest version.
    """
    yaml = """
a: !core/complex-42.0.0
  0j
    """

    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(None) as warning:
        with asdf.open(buff, ignore_version_mismatch=False) as ff:
            assert isinstance(ff.tree['a'], complex)

    assert len(warning) == 1
    assert str(warning[0].message) == (
        "'tag:stsci.edu:asdf/core/complex' with version 42.0.0 found in file, "
        "but latest supported version is 1.0.0")

    # Make sure warning is repeatable
    buff.seek(0)
    with pytest.warns(None) as warning:
        with asdf.open(buff, ignore_version_mismatch=False) as ff:
            assert isinstance(ff.tree['a'], complex)

    assert len(warning) == 1
    assert str(warning[0].message) == (
        "'tag:stsci.edu:asdf/core/complex' with version 42.0.0 found in file, "
        "but latest supported version is 1.0.0")

    # Make sure the warning does not occur if it is being ignored (default)
    buff.seek(0)
    with pytest.warns(None) as warning:
        with asdf.open(buff) as ff:
            assert isinstance(ff.tree['a'], complex)

    assert len(warning) == 0, helpers.display_warnings(warning)


    # If the major and minor match, there should be no warning.
    yaml = """
a: !core/complex-1.0.1
  0j
    """

    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(None) as warning:
        with asdf.open(buff, ignore_version_mismatch=False) as ff:
            assert isinstance(ff.tree['a'], complex)

    assert len(warning) == 0
# Exemple #19
def test_unsupported_version_warning():
    """Warn when a file's tag version matches no declared supported_versions.

    The extension only supports custom_flow 1.0.0, so reading a 1.1.0-tagged
    object must emit exactly one incompatibility warning.

    NOTE(review): the opened ``data`` handle is never closed, and
    ``pytest.warns(None)`` was removed in pytest 7 — confirm both are
    acceptable for this suite.
    """
    class CustomFlow:
        pass

    class CustomFlowType(types.CustomType):
        version = '1.0.0'
        supported_versions = [(1,0,0)]
        name = 'custom_flow'
        organization = 'nowhere.org'
        standard = 'custom'
        types = [CustomFlow]

    class CustomFlowExtension(CustomExtension):
        @property
        def types(self):
            return [CustomFlowType]

    yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.1.0>
    c: 100
    d: 3.14
"""
    buff = helpers.yaml_to_asdf(yaml)

    with pytest.warns(None) as _warnings:
        data = asdf.open(buff, extensions=CustomFlowExtension())

    assert len(_warnings) == 1
    assert str(_warnings[0].message) == (
        "Version 1.1.0 of tag:nowhere.org:custom/custom_flow is not compatible "
        "with any existing tag implementations")
# Exemple #20
def test_to_yaml(tmpdir):
    """The 'to_yaml' CLI command converts a file with blocks to inline YAML."""
    # FIX: np.float (an alias of builtin float) was removed in numpy 1.24;
    # np.float64 is the dtype it always resolved to.
    x = np.arange(0, 10, dtype=np.float64)

    tree = {
        'science_data': x,
        'subset': x[3:-3],
        'skipping': x[::2],
        'not_shared': np.arange(10, 0, -1, dtype=np.uint8)
        }

    path = os.path.join(str(tmpdir), 'original.asdf')
    ff = AsdfFile(tree)
    ff.write_to(path)
    # Shared base array plus the independent uint8 array -> two blocks.
    assert len(ff.blocks) == 2

    result = main.main_from_args(['to_yaml', path])

    assert result == 0

    files = get_file_sizes(str(tmpdir))

    assert 'original.asdf' in files
    assert 'original.yaml' in files

    # The converted file holds the same tree with no internal blocks left.
    with asdf.open(os.path.join(str(tmpdir), 'original.yaml')) as ff:
        assert_tree_match(ff.tree, tree)
        assert len(list(ff.blocks.internal_blocks)) == 0
# Exemple #21
def test_extract(tmpdir):
    """extract_file pulls a standalone ASDF file out of an ASDF-in-FITS file."""
    hdulist = HDUList()
    image = ImageHDU(np.random.random((25, 25)))
    hdulist.append(image)

    tree = {
        'some_words': 'These are some words',
        'nested': {
            'a': 100,
            'b': 42
        },
        'list': list(range(10)),
        'image': image.data
    }

    # Build the embedded file on disk.
    asdf_in_fits = str(tmpdir.join('asdf.fits'))
    with AsdfInFits(hdulist, tree) as embedded:
        embedded.write_to(asdf_in_fits)

    pure_asdf = str(tmpdir.join('extract.asdf'))
    extract.extract_file(asdf_in_fits, pure_asdf)
    assert os.path.exists(pure_asdf)

    # The extracted file is a plain AsdfFile carrying an identical tree.
    with asdf.open(pure_asdf) as extracted:
        assert not isinstance(extracted, AsdfInFits)
        assert_tree_match(tree, extracted.tree)
# Exemple #22
def test_type_missing_dependencies():
    """A type whose required package is absent warns but still yields raw data.

    NOTE(review): ``pytest.warns(None)`` was removed in pytest 7 — confirm
    the pinned pytest version.
    """

    class MissingType(types.CustomType):
        name = 'missing'
        organization = 'nowhere.org'
        version = (1, 1, 0)
        standard = 'custom'
        # Deliberately reference a package that cannot be imported.
        types = ['asdfghjkl12345.foo']
        requires = ["ASDFGHJKL12345"]

    class DefaultTypeExtension(CustomExtension):
        @property
        def types(self):
            return [MissingType]

    yaml = """
custom: !<tag:nowhere.org:custom/missing-1.1.0>
  b: {foo: 42}
    """
    buff = helpers.yaml_to_asdf(yaml)
    with pytest.warns(None) as w:
        with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff:
            # The tagged object falls back to its raw tree representation.
            assert ff.tree['custom']['b']['foo'] == 42

    assert len(w) == 1
def test_junk_file():
    """A valid ASDF header followed by garbage (no YAML document) is rejected."""
    stream = io.BytesIO(b"#ASDF 1.0.0\nFOO")
    stream.seek(0)

    with pytest.raises(ValueError):
        with asdf.open(stream):
            pass
# Exemple #24
    def load_file(self, filepath):
        """Load an ASDF file from *filepath* and display it in the viewer.

        Opens the file with asdf, loads it into an AstroImage, shows the
        image on ``self.fitsimage``, and sets the window title to the path.
        """
        image = AstroImage.AstroImage(logger=self.logger)
        with asdf.open(filepath) as asdf_f:
            image.load_asdf(asdf_f)

        self.fitsimage.set_image(image)
        self.setWindowTitle(filepath)
def test_invalid_source(small_tree):
    """blocks.get_block/get_source reject out-of-range and bad-typed sources."""
    buff = io.BytesIO()

    af = asdf.AsdfFile(small_tree)
    # Since we're testing with small arrays, force all arrays to be stored
    # in internal blocks rather than letting some of them be automatically put
    # inline.
    af.write_to(buff, all_array_storage='internal')

    buff.seek(0)
    with asdf.open(buff) as reread:
        # Block 0 exists and is retrievable.
        reread.blocks.get_block(0)

        # An index past the end of the block list.
        with pytest.raises(ValueError):
            reread.blocks.get_block(2)

        # A URI source pointing at unreachable external data.
        with pytest.raises(IOError):
            reread.blocks.get_block("http://127.0.0.1/")

        # A float is not a valid source designator at all.
        with pytest.raises(TypeError):
            reread.blocks.get_block(42.0)

        with pytest.raises(ValueError):
            reread.blocks.get_source(42.0)

        # Round trip: block object back to its source index.
        block = reread.blocks.get_block(0)
        assert reread.blocks.get_source(block) == 0
# Exemple #26
def test_stream_real_file(tmpdir):
    """Rows appended to a real file after a streamed block become the stream."""
    path = os.path.join(str(tmpdir), 'test.asdf')

    tree = {
        'nonstream': np.array([1, 2, 3, 4], np.int64),
        'stream': stream.Stream([6, 2], np.float64)
    }

    with open(path, 'wb') as fd:
        ff = asdf.AsdfFile(tree)
        # Since we're testing with small arrays, force this array to be stored
        # in an internal block rather than letting it be automatically put
        # inline.
        ff.set_array_storage(ff['nonstream'], 'internal')
        ff.write_to(fd)
        # FIX: tobytes() replaces ndarray.tostring(), which was deprecated
        # and removed in numpy 1.23; the written bytes are identical.
        for i in range(100):
            fd.write(np.array([i] * 12, np.float64).tobytes())

    with asdf.open(path) as ff:
        # The streamed block is only discovered when the stream is accessed.
        assert len(ff.blocks) == 1
        assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64))
        assert ff.tree['stream'].shape == (100, 6, 2)
        assert len(ff.blocks) == 2
        for i, row in enumerate(ff.tree['stream']):
            assert np.all(row == i)
def _test_defragment(tmpdir, codec):
    """Shared driver: 'defragment' rewrites a file smaller with *codec*."""
    # FIX: np.float (an alias of builtin float) was removed in numpy 1.24;
    # np.float64 is the dtype it always resolved to.
    x = np.arange(0, 1000, dtype=np.float64)

    tree = {
        'science_data': x,
        'subset': x[3:-3],
        'skipping': x[::2],
        'not_shared': np.arange(100, 0, -1, dtype=np.uint8)
        }

    path = os.path.join(str(tmpdir), 'original.asdf')
    out_path = os.path.join(str(tmpdir), 'original.defragment.asdf')
    ff = AsdfFile(tree)
    ff.write_to(path)
    # Shared base array plus the independent uint8 array -> two blocks.
    assert len(ff.blocks) == 2

    result = main.main_from_args(
        ['defragment', path, '-o', out_path, '-c', codec])

    assert result == 0

    files = get_file_sizes(str(tmpdir))

    assert 'original.asdf' in files
    assert 'original.defragment.asdf' in files

    # Compression must actually shrink the file.
    assert files['original.defragment.asdf'] < files['original.asdf']

    with asdf.open(os.path.join(str(tmpdir), 'original.defragment.asdf')) as ff:
        assert_tree_match(ff.tree, tree)
        assert len(list(ff.blocks.internal_blocks)) == 2
# Exemple #28
def test_version_mismatch_with_supported_versions():
    """Make sure that defining the supported_versions field does not affect
    whether or not schema mismatch warnings are triggered.

    NOTE(review): the opened ``data`` handle is never closed, and
    ``pytest.warns(None)`` was removed in pytest 7 — confirm both are
    acceptable for this suite.
    """

    class CustomFlow:
        pass

    class CustomFlowType(CustomTestType):
        version = '1.1.0'
        supported_versions = ['1.0.0', '1.1.0']
        name = 'custom_flow'
        organization = 'nowhere.org'
        standard = 'custom'
        types = [CustomFlow]

    class CustomFlowExtension(CustomExtension):
        @property
        def types(self):
            return [CustomFlowType]

    yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.0.0>
    c: 100
    d: 3.14
"""
    buff = helpers.yaml_to_asdf(yaml)
    # Even though 1.0.0 is listed in supported_versions, the mismatch with
    # the latest version (1.1.0) must still produce exactly one warning.
    with pytest.warns(None) as w:
        data = asdf.open(
            buff, ignore_version_mismatch=False,
            extensions=CustomFlowExtension())
    assert len(w) == 1, helpers.display_warnings(w)
    assert str(w[0].message) == (
        "'tag:nowhere.org:custom/custom_flow' with version 1.0.0 found in "
        "file, but latest supported version is 1.1.0")
# Exemple #29
def test_stream_with_nonstream():
    """A streamed block coexists with a regular internal block in one buffer."""
    buff = io.BytesIO()

    tree = {
        'nonstream': np.array([1, 2, 3, 4], np.int64),
        'stream': stream.Stream([6, 2], np.float64)
    }

    ff = asdf.AsdfFile(tree)
    # Since we're testing with small arrays, force this array to be stored in
    # an internal block rather than letting it be automatically put inline.
    ff.set_array_storage(ff['nonstream'], 'internal')
    ff.write_to(buff)
    # FIX: tobytes() replaces ndarray.tostring(), which was deprecated and
    # removed in numpy 1.23; the written bytes are identical.
    for i in range(100):
        buff.write(np.array([i] * 12, np.float64).tobytes())

    buff.seek(0)

    with asdf.open(buff) as ff:
        # The streamed block is only discovered when the stream is accessed.
        assert len(ff.blocks) == 1
        assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64))
        assert ff.tree['stream'].shape == (100, 6, 2)
        assert len(ff.blocks) == 2
        for i, row in enumerate(ff.tree['stream']):
            assert np.all(row == i)
# Exemple #30
def test_history_to_file(tmpdir):
    """History entries written to a file can be read back via tree and API."""
    tmpfile = str(tmpdir.join('history.asdf'))

    software = {'name': 'my_tool',
                'homepage': 'http://nowhere.org',
                'author': 'John Doe',
                'version': '2.0'}
    with asdf.AsdfFile() as af:
        af.add_history_entry('This happened', software)
        af.write_to(tmpfile)

    with asdf.open(tmpfile) as af:
        history = af.tree['history']
        assert 'entries' in history
        assert 'extensions' in history
        assert len(history['entries']) == 1

        entry = history['entries'][0]
        assert entry['description'] == 'This happened'
        assert entry['software']['name'] == 'my_tool'
        assert entry['software']['version'] == '2.0'

        # Test the history entry retrieval API
        entries = af.get_history_entries()
        assert len(entries) == 1
        assert isinstance(entries, list)
        assert isinstance(entries[0], HistoryEntry)
        assert entries[0]['description'] == "This happened"
        assert entries[0]['software']['name'] == 'my_tool'
# Exemple #31
def test_extension_metadata(tmpdir):
    """Writing a file records which extensions were used in its history."""
    af = asdf.AsdfFile()
    # So far only the base extension has been used.
    assert len(af.type_index.get_extensions_used()) == 1

    tmpfile = str(tmpdir.join('extension.asdf'))
    af.write_to(tmpfile)

    with asdf.open(tmpfile) as reread:
        extensions = reread.tree['history']['extensions']
        assert len(extensions) == 1
        metadata = extensions[0]
        assert metadata.extension_class == 'asdf.extension.BuiltinExtension'
        # Don't bother with testing the version here since it will depend on
        # how recently the package was built (version is auto-generated)
        assert metadata.software['name'] == 'asdf'
# Exemple #32
def test_strict_extension_check():
    """strict_extension_check raises when a recorded extension is unavailable."""
    yaml = """
history:
  extensions:
    - !core/extension_metadata-1.0.0
      extension_class: foo.bar.FooBar
      software: !core/software-1.0.0
        name: foo
        version: 1.2.3
    """

    # foo.bar.FooBar is not installed, so a strict open must fail.
    buff = yaml_to_asdf(yaml)
    with pytest.raises(RuntimeError):
        with asdf.open(buff, strict_extension_check=True):
            pass
# Exemple #33
def test_tagging_scalars():
    """A YAML tag on a scalar controls deserialization; untagged stays str."""
    from astropy import units as u

    yaml = """
unit: !unit/unit-1.0.0
  m
not_unit:
  m
    """

    buff = helpers.yaml_to_asdf(yaml)
    with asdf.open(buff) as af:
        # The tagged scalar becomes an astropy unit; the untagged one a str.
        assert isinstance(af.tree['unit'], u.UnitBase)
        assert not isinstance(af.tree['not_unit'], u.UnitBase)
        assert isinstance(af.tree['not_unit'], str)
        assert af.tree == {'unit': u.m, 'not_unit': 'm'}
# Exemple #34
def test_no_blocks_increase_size(tmp_path, create_editor, version):
    """Editing a block-free file to a larger tree needs no user confirmation."""
    file_path = str(tmp_path / "test.asdf")

    with asdf.AsdfFile(version=version) as original:
        original["foo"] = "bar"
        original.write_to(file_path)

    replacement = "a" * 32768
    os.environ["EDITOR"] = create_editor(r"foo: bar", f"foo: {replacement}")

    # With no binary blocks the file can simply be expanded in place, so the
    # edit command succeeds without prompting the user.
    result = main.main_from_args(["edit", file_path])
    assert result == 0

    with asdf.open(file_path) as edited:
        assert edited["foo"] == replacement
# Exemple #35
def test_set_array_compression(tmpdir):
    """Per-array compression settings round-trip through a written file."""
    tmpfile = os.path.join(str(tmpdir), 'compressed.asdf')

    zlib_data = np.array(list(range(1000)))
    bzp2_data = np.array(list(range(1000)))

    # Assign a different codec to each array before writing.
    with asdf.AsdfFile(dict(zlib_data=zlib_data, bzp2_data=bzp2_data)) as writer:
        writer.set_array_compression(zlib_data, 'zlib')
        writer.set_array_compression(bzp2_data, 'bzp2')
        writer.write_to(tmpfile)

    with asdf.open(tmpfile) as reader:
        assert reader.get_array_compression(reader.tree['zlib_data']) == 'zlib'
        assert reader.get_array_compression(reader.tree['bzp2_data']) == 'bzp2'
# Exemple #36
def test_read_large_literal():
    """An integer literal beyond the ASDF standard's range warns but still reads."""

    # 2**64 exceeds the largest integer the ASDF standard allows as a literal.
    value = 1 << 64
    yaml = """integer: {}""".format(value)

    buff = helpers.yaml_to_asdf(yaml)

    with pytest.warns(UserWarning) as w:
        with asdf.open(buff) as af:
            assert af['integer'] == value

        # We get two warnings: one for validation time, and one when defaults
        # are filled. It seems like we could improve this architecture, though...
        assert len(w) == 2
        assert str(w[0].message).startswith('Invalid integer literal value')
        assert str(w[1].message).startswith('Invalid integer literal value')
# Exemple #37
def test_array_view(tmp_path):
    """
    Special handling is required when a view over a larger array
    is assigned to an HDU and referenced from the ASDF tree.
    """
    file_path = tmp_path / "test.fits"

    base = np.arange(100, dtype=np.float64).reshape(5, 20)
    view = base[:, :10]

    # Put the view in an HDU and reference that same HDU data from the tree.
    hdul = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(view)])
    with asdf.fits_embed.AsdfInFits(hdulist=hdul) as af:
        af["data"] = hdul[-1].data
        af.write_to(file_path)

    with asdf.open(file_path) as af:
        assert_array_equal(af["data"], view)
# Exemple #38
def test_stream():
    """A streamed block reads back all rows appended after write_to."""
    buff = io.BytesIO()

    tree = {'stream': stream.Stream([6, 2], np.float64)}

    ff = asdf.AsdfFile(tree)
    ff.write_to(buff)
    # FIX: tobytes() replaces ndarray.tostring(), which was deprecated and
    # removed in numpy 1.23; the written bytes are identical.
    for i in range(100):
        buff.write(np.array([i] * 12, np.float64).tobytes())

    buff.seek(0)

    with asdf.open(buff) as ff:
        assert len(ff.blocks) == 1
        # 100 appended rows of 12 float64s -> (100, 6, 2).
        assert ff.tree['stream'].shape == (100, 6, 2)
        for i, row in enumerate(ff.tree['stream']):
            assert np.all(row == i)
# Exemple #39
def test_open_stream(tmp_path):
    """asdf.open accepts any object that merely exposes a read() method."""
    file_path = tmp_path / "test.asdf"

    with asdf.AsdfFile() as af:
        af["foo"] = "bar"
        af.write_to(file_path)

    class ReadOnlyWrapper:
        # Minimal file-like object: only read() is provided.
        def __init__(self, fd):
            self._fd = fd

        def read(self, size=-1):
            return self._fd.read(size)

    with file_path.open("rb") as fd:
        with asdf.open(ReadOnlyWrapper(fd)) as af:
            assert af["foo"] == "bar"
# Exemple #40
def test_serialize_custom_type(tmpdir):
    """A registered custom type round-trips; invalid values fail validation."""
    with asdf.config_context() as config:
        config.add_resource_mapping({FOO_SCHEMA_URI: FOO_SCHEMA})
        config.add_extension(FooExtension())

        path = str(tmpdir / "test.asdf")

        # Write a valid Foo and read it back.
        writer = asdf.AsdfFile()
        writer["foo"] = Foo("bar")
        writer.write_to(path)

        with asdf.open(path) as reader:
            assert reader["foo"].value == "bar"

        # An integer payload violates the Foo schema at write time.
        with pytest.raises(asdf.ValidationError):
            writer["foo"] = Foo(12)
            writer.write_to(path)
# Exemple #41
def test_to_fits_sip():
    """A gWCS converted with to_fits_sip agrees with the original to tolerance.

    Forward transforms must match to 1e-10; the fitted SIP inverse must
    recover pixel coordinates to within the requested 0.1 pixel error.
    """
    y, x = np.mgrid[:1024:10, :1024:10]
    xflat = np.ravel(x[1:-1, 1:-1])
    yflat = np.ravel(y[1:-1, 1:-1])
    # FIX: the file handle was previously never closed (resource leak);
    # keep it open only while the WCS is in use.
    with asdf.open(get_pkg_data_filename('data/miriwcs.asdf')) as af:
        miriwcs = af.tree['wcs']
        bounding_box = ((0, 1024), (0, 1024))
        mirisip = miriwcs.to_fits_sip(bounding_box, max_inv_pix_error=0.1)
        fitssip = astwcs.WCS(mirisip)
        # FITS pixel coordinates are 1-based, hence the +1/-1 shifts.
        fitsvalx, fitsvaly = fitssip.all_pix2world(xflat + 1, yflat + 1, 1)
        gwcsvalx, gwcsvaly = miriwcs(xflat, yflat)
        assert_allclose(gwcsvalx, fitsvalx, atol=1e-10, rtol=0)
        assert_allclose(gwcsvaly, fitsvaly, atol=1e-10, rtol=0)
        fits_inverse_valx, fits_inverse_valy = fitssip.all_world2pix(
            fitsvalx, fitsvaly, 1)
        assert_allclose(xflat, fits_inverse_valx - 1, atol=0.1, rtol=0)
        assert_allclose(yflat, fits_inverse_valy - 1, atol=0.1, rtol=0)
# Exemple #42
def test_version_mismatch_with_supported_versions():
    """Make sure that defining the supported_versions field does not affect
    whether or not schema mismatch warnings are triggered.

    NOTE(review): this duplicates an earlier test of the same name (this
    variant wires up tag/url mappings by hand); in one module the later
    definition would shadow the earlier one — confirm both are intended.
    The opened ``data`` handle is never closed, and ``pytest.warns(None)``
    was removed in pytest 7 — confirm both are acceptable.
    """
    class CustomFlow(object):
        pass

    class CustomFlowType(CustomTestType):
        version = '1.1.0'
        supported_versions = ['1.0.0', '1.1.0']
        name = 'custom_flow'
        organization = 'nowhere.org'
        standard = 'custom'
        types = [CustomFlow]

    class CustomFlowExtension(object):
        @property
        def types(self):
            return [CustomFlowType]

        @property
        def tag_mapping(self):
            return [('tag:nowhere.org:custom',
                     'http://nowhere.org/schemas/custom{tag_suffix}')]

        @property
        def url_mapping(self):
            return [
                ('http://nowhere.org/schemas/custom/',
                 util.filepath_to_url(TEST_DATA_PATH) + '/{url_suffix}.yaml')
            ]

    yaml = """
flow_thing:
  !<tag:nowhere.org:custom/custom_flow-1.0.0>
    c: 100
    d: 3.14
"""
    buff = helpers.yaml_to_asdf(yaml)
    # Even though 1.0.0 is in supported_versions, the mismatch with the
    # latest version (1.1.0) must still produce exactly one warning.
    with pytest.warns(None) as w:
        data = asdf.open(buff,
                         ignore_version_mismatch=False,
                         extensions=CustomFlowExtension())
    assert len(w) == 1, helpers.display_warnings(w)
    assert str(w[0].message) == (
        "'tag:nowhere.org:custom/custom_flow' with version 1.0.0 found in "
        "file, but latest supported version is 1.1.0")
def test_extra_properties():
    """Verify that known attributes and extra (schema-unlisted) properties
    of extension_metadata are both readable after deserialization."""
    yaml = """
metadata: !core/extension_metadata-1.0.0
  extension_class: foo.extension.FooExtension
  software: !core/software-1.0.0
    name: FooSoft
    version: "1.5"
  extension_uri: http://foo.biz/extensions/foo-1.0.0
    """

    buff = helpers.yaml_to_asdf(yaml)

    with asdf.open(buff) as af:
        # BUG FIX: these were bare comparison expressions with no effect, so
        # the test would pass even if every value were wrong.  They must be
        # asserted to actually verify anything.
        assert af["metadata"].extension_class == "foo.extension.FooExtension"
        assert af["metadata"].software["name"] == "FooSoft"
        assert af["metadata"].software["version"] == "1.5"
        assert af["metadata"]["extension_uri"] == "http://foo.biz/extensions/foo-1.0.0"
Exemple #44
0
def read_table(filename, data_key=None, find_table=None, **kwargs):
    """
    Read a `~astropy.table.Table` object from an ASDF file

    This requires `asdf <https://pypi.org/project/asdf/>`_ to be installed.
    By default, this function will look for a Table object with the key of
    ``data`` in the top-level ASDF tree. The parameters ``data_key`` and
    ``find_table`` can be used to override the default behavior.

    This function is registered as the Table reader for ASDF files with the
    unified I/O interface.

    Parameters
    ----------
    filename : str or :class:`py.path:local`
        Name of the file to be read
    data_key : str
        Optional top-level key to use for finding the Table in the tree. If not
        provided, uses ``data`` by default. Use of this parameter is not
        compatible with ``find_table``.
    find_table : function
        Optional function to be used for locating the Table in the tree. The
        function takes a single parameter, which is a dictionary representing
        the top of the ASDF tree. The function must return a
        `~astropy.table.Table` instance.

    Returns
    -------
    table : `~astropy.table.Table`
        `~astropy.table.Table` instance

    Raises
    ------
    ValueError
        If both ``data_key`` and ``find_table`` are given.
    ImportError
        If the ``asdf`` package is not installed.
    """
    # Validate the arguments before touching the optional dependency so
    # usage errors are reported even when asdf is not installed.
    if data_key and find_table:
        raise ValueError(
            "Options 'data_key' and 'find_table' are not compatible")

    try:
        import asdf
    except ImportError:
        # ImportError is a subclass of Exception, so callers that caught the
        # previous bare Exception continue to work.
        raise ImportError(
            "The asdf module is required to read and write ASDF files")

    with asdf.open(filename, **kwargs) as af:
        if find_table:
            return find_table(af.tree)
        return af[data_key or 'data']
Exemple #45
0
def test_cartesian_spherical_asdf(tmpdir):
    """Round-trip SphericalToCartesian/CartesianToSpherical through ASDF and
    check the deserialized transforms agree with the originals."""
    s2c0 = geometry.SphericalToCartesian(wrap_lon_at=360)
    c2s0 = geometry.CartesianToSpherical(wrap_lon_at=180)

    # asdf round-trip test:
    assert_model_roundtrip(c2s0, tmpdir)
    assert_model_roundtrip(s2c0, tmpdir)

    # create file object
    f = asdf.AsdfFile({'c2s': c2s0, 's2c': s2c0})

    # write to...
    buf = io.BytesIO()
    f.write_to(buf)

    # read back:
    buf.seek(0)
    f = asdf.open(buf)

    # retrieve transformations:
    c2s = f['c2s']
    s2c = f['s2c']

    pcoords = [(45.0, -90.0), (45.0, -45.0), (45, 0.0), (45.0, 45),
               (45.0, 90.0), (135.0, -90.0), (135.0, -45.0), (135.0, 0.0),
               (135.0, 45.0), (135.0, 90.0)]

    ncoords = [(225.0, -90.0), (225.0, -45.0), (225.0, 0.0), (225.0, 45.0),
               (225.0, 90.0), (315.0, -90.0), (315.0, -45.0), (315.0, 0.0),
               (315.0, 45.0), (315.0, 90.0)]

    for lon, lat in pcoords:
        xyz = s2c(lon, lat)
        assert xyz == s2c0(lon, lat)
        lon2, lat2 = c2s(*xyz)
        # BUG FIX: `assert lon2, lat2 == c2s0(*xyz)` only asserted that lon2
        # was truthy (the rest was the assert message).  Compare the values.
        assert np.allclose((lon2, lat2), c2s0(*xyz))
        assert np.allclose((lon, lat), (lon2, lat2))

    for lon, lat in ncoords:
        xyz = s2c(lon, lat)
        assert xyz == s2c0(lon, lat)
        lon2, lat2 = c2s(*xyz)
        lon3, lat3 = s2c.inverse(*xyz)
        # BUG FIX: same vacuous-assert pattern as above.
        assert np.allclose((lon2, lat2), c2s0(*xyz))
        assert np.allclose((lon, lat), (lon2 + 360, lat2))
        # BUG FIX: the original compared against lat2; the inverse transform
        # results are (lon3, lat3).
        assert np.allclose((lon, lat), (lon3, lat3))
Exemple #46
0
def test_copy(tmpdir):
    """A copy of an open AsdfFile is independent: mutating the copy must not
    leak into the original, and the copy stays usable after the source file
    is closed."""
    tmpdir = str(tmpdir)

    my_array = np.random.rand(8, 8)
    tree = {'my_array': my_array, 'foo': {'bar': 'baz'}}
    ff = asdf.AsdfFile(tree)
    ff.write_to(os.path.join(tmpdir, 'test.asdf'))

    with asdf.open(os.path.join(tmpdir, 'test.asdf')) as ff:
        ff2 = ff.copy()
        ff2.tree['my_array'] *= 2
        ff2.tree['foo']['bar'] = 'boo'

        assert np.all(ff2.tree['my_array'] == ff.tree['my_array'] * 2)
        assert ff.tree['foo']['bar'] == 'baz'

    # BUG FIX: the original compared the copy's array with itself, which is
    # vacuously true.  Verify the copy's data is still valid (twice the
    # original array) after the source file has been closed.
    assert_array_equal(ff2.tree['my_array'], my_array * 2)
Exemple #47
0
def test_serialize_table(tmpdir):
    """Round-trip a binary table through AsdfInFits and check that the ASDF
    tree entry references the FITS data source rather than an inline copy."""
    outpath = str(tmpdir.join('table.fits'))

    table = Table(np.random.random((10, 10)))

    hdulist = fits.HDUList()
    hdulist.append(fits.BinTableHDU(table))

    tree = {'my_table': hdulist[1].data}
    with fits_embed.AsdfInFits(hdulist, tree) as ff:
        ff.write_to(outpath)

    with asdf.open(outpath) as ff:
        # The table should be backed by the FITS HDU, not duplicated.
        assert ff.tree['my_table']._source.startswith('fits:')
Exemple #48
0
def test_dots_but_no_block_index():
    """A trailing ``...`` makes the file look like it might carry a block
    index; the reader must cope when scanning back from the end finds none."""
    buff = io.BytesIO()

    af = asdf.AsdfFile({'array': np.ones((8, 8))})
    af.write_to(buff, include_block_index=False)

    # Pad the file and terminate with a YAML document-end marker so it only
    # *appears* to end in a block index.
    buff.write(b'A' * 64000)
    buff.write(b'...\n')

    buff.seek(0)
    with asdf.open(buff) as af:
        assert len(af.blocks) == 1
Exemple #49
0
def test_exploded_stream_read(tmpdir, small_tree):
    """Opening an exploded file through a stream succeeds; only touching the
    external data block raises, so the tree itself remains readable."""
    path = os.path.join(str(tmpdir), 'test.asdf')

    asdf.AsdfFile(small_tree).write_to(path, all_array_storage='external')

    with open(path, 'rb') as fd:
        stream = generic_io.InputStream(fd, 'r')
        # The tree content is accessible...
        with asdf.open(stream) as af:
            # ...but dereferencing the missing external block must fail.
            with pytest.raises(ValueError):
                af.tree['science_data'][:]
Exemple #50
0
def imaging_distortion(input_model, reference_files):
    """Build the "detector" to "v2v3" transform for imaging mode.

    Parameters
    ----------
    input_model : `~jwst.datamodel.DataModel`
        Input datamodel for processing.
    reference_files : dict
        Maps reference file types to their file names; ``'distortion'`` is
        read unconditionally and ``'filteroffset'`` is used when present.

    Returns
    -------
    The transform model.
    """
    dist = DistortionModel(reference_files['distortion'])
    transform = dist.model

    # If the reference file does not provide a bounding box, synthesize one
    # that covers the full image.
    try:
        transform.bounding_box
    except NotImplementedError:
        transform.bounding_box = transform_bbox_from_shape(
            input_model.data.shape)
    dist.close()

    # Prepend a per-filter/pupil pixel offset when a filteroffset reference
    # file is available and contains a matching row with usable offsets.
    filteroffset_file = reference_files['filteroffset']
    if filteroffset_file is not None:
        obsfilter = input_model.meta.instrument.filter
        obspupil = input_model.meta.instrument.pupil
        with asdf.open(filteroffset_file) as filter_offset:
            filters = filter_offset.tree['filters']

        row = find_row(filters, {'filter': obsfilter, 'pupil': obspupil})
        if row is not None:
            col_offset = row.get('col_offset', 'N/A')
            row_offset = row.get('row_offset', 'N/A')
            # Rows may mark offsets as 'N/A'; apply the shift only when both
            # values are real numbers.
            if col_offset != 'N/A' and row_offset != 'N/A':
                transform = Shift(col_offset) & Shift(row_offset) | transform

    return transform
Exemple #51
0
def read_grid(filename, fmt=None):
	'''Read a grid from a file.

	Parameters
	----------
	filename : string
		The path of the file you want to read the grid from.
	fmt : string
		The file format. If it is not given, the file format will be guessed from the file extension.

	Returns
	-------
	Grid
		The read grid.

	Raises
	------
	ValueError
		If the file format could not be guessed from the file extension.
	NotImplementedError
		If the file format was not yet implemented.
	'''
	if fmt is None:
		fmt = _guess_file_format(filename)

		if fmt is None:
			raise ValueError('Format not given and could not be guessed based on the file extension.')

	if fmt in ['asdf', 'fits']:
		# Imported lazily so other formats work without these dependencies.
		import asdf

		from ..field import Grid

		# BUG FIX: use a context manager so the file is closed even when
		# Grid.from_dict raises (the original leaked the handle on error).
		with asdf.open(filename) as f:
			return Grid.from_dict(f.tree['grid'])
	elif fmt == 'pickle':
		import pickle

		with open(filename, 'rb') as f:
			return pickle.load(f)
	else:
		raise NotImplementedError('The "%s" file format has not been implemented.' % fmt)
Exemple #52
0
def test_access_tree_outside_handler(tmpdir):
    """Array data must become inaccessible once the file handler exits."""
    filename = str(tmpdir.join('test.asdf'))

    asdf.AsdfFile({'random': np.random.random(10)}).write_to(filename)

    with asdf.open(filename) as newf:
        pass

    # Accessing array data outside of handler should fail, whether through
    # the tree...
    with pytest.raises(OSError):
        repr(newf.tree['random'])

    # ...or through the top-level getattr.
    with pytest.raises(OSError):
        repr(newf['random'])
Exemple #53
0
def test_masked_array_stay_open_bug(tmpdir):
    """Regression test: repeatedly opening a file containing a masked array
    must not leak OS-level file handles."""
    psutil = pytest.importorskip('psutil')

    path = os.path.join(str(tmpdir), 'masked.asdf')

    masked = np.ma.array([1, 2, 3], mask=[False, True, False])
    asdf.AsdfFile({'test': masked}).write_to(path)

    proc = psutil.Process()
    baseline = proc.open_files()

    for _ in range(3):
        with asdf.open(path) as af:
            np.sum(af.tree['test'])

    # Every handle opened above should have been released again.
    assert len(proc.open_files()) == len(baseline)
Exemple #54
0
def test_dont_load_data():
    """str/repr of the tree must not trigger loading of block data."""
    # BUG FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # np.float64 is the dtype it aliased.
    x = np.arange(0, 10, dtype=np.float64)
    tree = {'science_data': x, 'subset': x[3:-3], 'skipping': x[::2]}
    ff = asdf.AsdfFile(tree)

    buff = io.BytesIO()
    ff.write_to(buff)

    buff.seek(0)
    with asdf.open(buff) as ff:
        ff.run_hook('reserve_blocks')

        # repr and str shouldn't load data
        str(ff.tree['science_data'])
        repr(ff.tree)

        for block in ff.blocks.internal_blocks:
            assert block._data is None
Exemple #55
0
def assert_selector_roundtrip(s, tmpdir, version=None):
    """
    Assert that a selector can be written to an ASDF file and read back
    in without losing any of its essential properties.
    """
    path = str(tmpdir / "test.asdf")

    with asdf.AsdfFile({"selector": s}, version=version) as af:
        af.write_to(path)

    with asdf.open(path) as af:
        restored = af["selector"]
        # Pick the comparison helper matching the selector flavor; an
        # unexpected type is a test error.
        if isinstance(s, selector.RegionsSelector):
            checker = _assert_selector_equal
        elif isinstance(s, selector._LabelMapper):
            checker = _assert_mapper_equal
        else:
            checker = None
        assert checker is not None
        checker(s, restored)
Exemple #56
0
def test_domain_orthopoly(tmpdir):
    """Orthogonal-polynomial models with custom domains must evaluate
    identically after an ASDF round trip."""
    cheb1d = astmodels.Chebyshev1D(2, c0=2, c1=3, c2=0.5, domain=[-2, 2])
    cheb2d = astmodels.Chebyshev2D(1, 1, c0_0=1, c0_1=2, c1_0=3,
                                   x_domain=[-2, 2], y_domain=[-2, 2])

    fa = AsdfFile()
    fa.tree['model1d'] = cheb1d
    fa.tree['model2d'] = cheb2d

    file_path = str(tmpdir.join('orthopoly_domain.asdf'))
    fa.write_to(file_path)

    with asdf.open(file_path) as f:
        assert f.tree['model1d'](1.8) == cheb1d(1.8)
        assert f.tree['model2d'](1.8, -1.5) == cheb2d(1.8, -1.5)
Exemple #57
0
def test_missing_extension_warning():
    """Opening a file written with an unavailable extension class must emit
    an AsdfWarning naming that class."""
    yaml = """
history:
  extensions:
    - !core/extension_metadata-1.0.0
      extension_class: foo.bar.FooBar
      software: !core/software-1.0.0
        name: foo
        version: 1.2.3
    """

    buff = yaml_to_asdf(yaml)
    expected = "File was created with extension class 'foo.bar.FooBar'"
    with pytest.warns(AsdfWarning, match=expected):
        with asdf.open(buff):
            pass
Exemple #58
0
def test_export_config(step_obj, expected, tmp_path):
    """Test retrieving of configuration parameters"""
    config_path = tmp_path / "config.asdf"
    step_obj.export_config(config_path)

    with asdf.open(config_path) as af:
        # StepConfig has an __eq__ implementation but we can't use it
        # due to differences between asdf 2.7 and 2.8 in serializing None
        # values.  Compare field by field until the minimum asdf
        # requirement is raised to >= 2.8.
        config = StepConfig.from_asdf(af)
        assert config.class_name == expected.class_name
        assert config.name == expected.name
        assert config.steps == expected.steps
        # Union of keys from both sides so missing parameters on either
        # side are compared against None.
        all_keys = set(expected.parameters.keys()) | set(config.parameters.keys())
        for key in all_keys:
            assert config.parameters.get(key) == expected.parameters.get(key)
Exemple #59
0
def explode(input, output=None):
    """
    Explode a given ASDF file so each data block is in a separate
    file.

    Parameters
    ----------
    input : str or file-like object
        The input file.

    output : str or file-like object
        The output file.  If not given, it defaults to the input file
        name with an ``_exploded`` suffix and an ``.asdf`` extension.
    """
    if output is None:
        base, ext = os.path.splitext(input)
        output = base + '_exploded' + '.asdf'
    with asdf.open(input) as ff:
        ff.write_to(output, all_array_storage='external')
Exemple #60
0
def read_metadata(filename):
    """
    Reads JWST data product and extracts metadata fields of interest

    Parameters
    ----------
    filename : `str`
        Path of JWST data file to be summarized

    Returns
    -------
    dict :
        `dict` containing JWST metadata fields of interest
    """
    # Tolerate files written with unknown tags or missing extensions so
    # metadata can still be extracted from newer or older products.
    with asdf.open(filename,
                   ignore_unrecognized_tag=True,
                   ignore_missing_extensions=True) as af:
        return parse_metadata(af)