Example #1
0
    def _write_tree(self,
                    tree: dict,
                    output: Union[str, BinaryIO],
                    file_mode: int = 0o666) -> None:
        """
        Write the model to disk.

        Stamps ``self.meta`` with the creation time, snapshots the current
        environment into a copy of the metadata, wraps ``tree`` (plus that
        metadata) in an ASDF file, marks every ``numpy.ndarray`` found in
        the tree for compression unless its path appears in
        ``self._compression_prefixes``, and finally records the number of
        bytes written in ``self._size``.

        :param tree: The data dict - will be the ASDF tree.
        :param output: The output file path or a file object.
        :param file_mode: The output file's permissions.
        :return: None
        """
        self.meta["created_at"] = get_datetime_now()
        # Copy so the environment snapshot is not persisted on the
        # instance's own metadata dict.
        meta = self.meta.copy()
        meta["environment"] = collect_environment()
        final_tree = {}
        final_tree.update(tree)
        final_tree["meta"] = meta
        isfileobj = not isinstance(output, str)
        if not isfileobj:
            # A path was given: remember it as the model source, open the
            # file ourselves, and apply the requested permissions.
            self._source = output
            path = output
            output = open(output, "wb")
            os.chmod(path, file_mode)
            pos = 0
        else:
            # A file object was given: measure the written size relative
            # to the current position, since the stream may already
            # contain data.
            pos = output.tell()
        try:
            with asdf.AsdfFile(final_tree) as file:
                # Depth-first walk over the tree using an explicit stack
                # to find every ndarray and configure its compression.
                # NOTE(review): the loop rebinds `path`, shadowing the
                # file-path variable above — harmless today because the
                # file path is no longer needed here, but fragile.
                queue = [("", tree)]
                while queue:
                    path, element = queue.pop()
                    if isinstance(element, dict):
                        for key, val in element.items():
                            queue.append((path + "/" + key, val))
                    elif isinstance(element, (list, tuple)):
                        for child in element:
                            queue.append((path, child))
                    elif isinstance(element, numpy.ndarray):
                        path += "/"
                        # NOTE(review): despite the attribute name, this is
                        # an exact membership test — an entry in
                        # _compression_prefixes only matches the whole
                        # "<path>/" string, not any longer path beneath
                        # it. Confirm this is the intended semantics.
                        if path not in self._compression_prefixes:
                            self._log.debug("%s -> %s compression", path,
                                            self.ARRAY_COMPRESSION)
                            file.set_array_compression(element,
                                                       self.ARRAY_COMPRESSION)
                        else:
                            self._log.debug("%s -> compression disabled", path)
                file.write_to(output)
            # Seek to the end to compute the serialized size in bytes.
            self._size = output.seek(0, os.SEEK_END) - pos
        finally:
            if not isfileobj:
                output.close()
Example #2
0
def test_stream_real_file(tmpdir):
    """Stream data appended to a real on-disk file round-trips correctly.

    Writes a tree containing one normal array and one streamed array,
    appends 100 rows of raw float64 data after the ASDF content, then
    re-opens the file and checks the stream is exposed as a (100, 6, 2)
    array whose rows hold their row index.
    """
    path = os.path.join(str(tmpdir), 'test.asdf')

    tree = {
        'nonstream': np.array([1, 2, 3, 4], np.int64),
        'stream': stream.Stream([6, 2], np.float64)
    }

    with open(path, 'wb') as fd:
        ff = asdf.AsdfFile(tree)
        ff.write_to(fd)
        for i in range(100):
            # tobytes() replaces ndarray.tostring(), which was deprecated
            # in NumPy 1.19 and removed in NumPy 2.0.
            fd.write(np.array([i] * 12, np.float64).tobytes())

    with asdf.AsdfFile().open(path) as ff:
        # Only the non-stream block is known before the stream is read.
        assert len(ff.blocks) == 1
        assert_array_equal(ff.tree['nonstream'],
                           np.array([1, 2, 3, 4], np.int64))
        assert ff.tree['stream'].shape == (100, 6, 2)
        assert len(ff.blocks) == 2
        for i, row in enumerate(ff.tree['stream']):
            assert np.all(row == i)
Example #3
0
def test_arbitrary_python_object():
    """Serializing an unsupported, plain Python object must raise a
    yaml.YAMLError instead of being written silently."""

    class Foo(object):
        pass

    buff = io.BytesIO()
    af = asdf.AsdfFile({'object': Foo()})
    with pytest.raises(yaml.YAMLError):
        af.write_to(buff)
Example #4
0
def output_to_asdf(output, input, label='output', outdir='.'):
    """Serialize *output* (plus deep copies of *input* and its
    ``'experiment'`` entry) to an ASDF file.

    The file is named ``<prefix><label><suffix>.asdf`` using the optional
    ``'prefix'``/``'suffix'`` keys of *input*, created inside *outdir*
    (which is created if missing). Prints the file name when done.
    """
    fname_prefix = input.get('prefix', '')
    fname_suffix = input.get('suffix', '')

    with asdf.AsdfFile() as f:
        f.tree.update(output)
        # Deep-copy so later mutation of the caller's dicts cannot change
        # what gets written.
        f.tree['input'] = copy.deepcopy(input)
        f.tree['experiment'] = copy.deepcopy(input['experiment'])
        fname = f'{fname_prefix}{label}{fname_suffix}.asdf'
        os.makedirs(outdir, exist_ok=True)
        # os.path.join is portable, unlike manual "/" concatenation.
        f.write_to(os.path.join(outdir, fname))
        print(fname)
Example #5
0
def test_unicode_to_list(tmpdir):
    """A '<U' unicode array stored inline round-trips and can be
    re-inlined and rewritten without error."""
    unicode_arr = np.array(['', '𐀠'], dtype='<U')

    buffer = io.BytesIO()
    af = asdf.AsdfFile({'unicode': unicode_arr})
    af.set_array_storage(unicode_arr, 'inline')
    af.write_to(buffer)
    buffer.seek(0)

    with asdf.open(buffer) as af:
        af.resolve_and_inline()
        af.write_to(io.BytesIO())
Example #6
0
def test_checksum(tmpdir):
    """A written internal block carries the expected checksum bytes and
    validates when checksum checking is enabled."""
    target = os.path.join(str(tmpdir), 'test.asdf')

    data = np.arange(0, 64, dtype='<i8').reshape((8, 8))
    asdf.AsdfFile({'my_array': data}).write_to(target, auto_inline=None)

    with asdf.open(target, validate_checksums=True) as af:
        block = af.blocks._internal_blocks[0]
        assert type(block.checksum) is bytes
        assert block.checksum == b'\xcaM\\\xb8t_L|\x00\n+\x01\xf1\xcfP1'
Example #7
0
def test_numpy_scalar(numpy_value, expected_value):
    """A NumPy scalar dumped through yamlutil loads back to its value
    (approximately, for floats on old NumPy versions)."""
    buffer = io.BytesIO()
    yamlutil.dump_tree({"value": numpy_value}, buffer, asdf.AsdfFile())
    buffer.seek(0)

    loaded = yamlutil.load_tree(buffer)["value"]
    if isinstance(expected_value, float) and NUMPY_LT_1_14:
        # Older NumPy prints floats with less precision.
        assert loaded == pytest.approx(expected_value, rel=0.001)
    else:
        assert loaded == expected_value
Example #8
0
def test_make_reference(tmpdir):
    """References into an external file resolve correctly, with '~' and '/'
    in tree keys escaped per JSON Pointer rules."""
    exttree = {
        # Include some ~ and / in the name to make sure that escaping
        # is working correctly
        'f~o~o/': {
            # np.float was removed in NumPy 1.24; it was only ever an
            # alias for the builtin float.
            'a': np.array([0, 1, 2], float),
            'b': np.array([3, 4, 5], float)
        }
    }
    external_path = os.path.join(str(tmpdir), 'external.asdf')
    ext = asdf.AsdfFile(exttree)
    ext.write_to(external_path)

    with asdf.open(external_path) as ext:
        ff = asdf.AsdfFile()
        ff.tree['ref'] = ext.make_reference(['f~o~o/', 'a'])
        assert_array_equal(ff.tree['ref'], ext.tree['f~o~o/']['a'])

        ff.write_to(os.path.join(str(tmpdir), 'source.asdf'))

    with asdf.open(os.path.join(str(tmpdir), 'source.asdf')) as ff:
        # '~' escapes to '~0' and '/' to '~1' in the stored pointer.
        assert ff.tree['ref']._uri == 'external.asdf#f~0o~0o~1/a'
Example #9
0
def test_custom_validation_with_definitions_good(tmpdir):
    """A tree that conforms to a custom schema using definitions validates
    on both write and read."""
    schema_path = helpers.get_test_data_path(
        'custom_schema_definitions.yaml')
    out_path = str(tmpdir.join('out.asdf'))

    # This tree conforms to the custom schema
    conforming_tree = {'thing': {'biz': 'hello', 'baz': 'world'}}

    with asdf.AsdfFile(conforming_tree, custom_schema=schema_path) as af:
        af.write_to(out_path)

    with asdf.open(out_path, custom_schema=schema_path):
        pass
Example #10
0
def test_max_min_literals(num):
    """Out-of-range literals are rejected wherever they appear in the
    tree: as a value, inside a list, and as a mapping key."""
    offending_trees = (
        {'test_int': num},
        {'test_list': [num]},
        {num: 'test_key'},
    )
    for tree in offending_trees:
        with pytest.raises(ValidationError):
            asdf.AsdfFile(tree)
Example #11
0
def test_stream_to_stream():
    """Stream data appended to a generic output stream round-trips
    through an InputStream wrapper."""
    tree = {
        'nonstream': np.array([1, 2, 3, 4], np.int64),
        'stream': stream.Stream([6, 2], np.float64)
    }

    buff = io.BytesIO()
    fd = generic_io.OutputStream(buff)

    ff = asdf.AsdfFile(tree)
    ff.write_to(fd)
    for i in range(100):
        # tobytes() replaces ndarray.tostring(), which was deprecated in
        # NumPy 1.19 and removed in NumPy 2.0.
        fd.write(np.array([i] * 12, np.float64).tobytes())

    buff.seek(0)

    with asdf.AsdfFile().open(generic_io.InputStream(buff, 'r')) as ff:
        assert len(ff.blocks) == 2
        assert_array_equal(ff.tree['nonstream'], np.array([1, 2, 3, 4], np.int64))
        assert ff.tree['stream'].shape == (100, 6, 2)
        for i, row in enumerate(ff.tree['stream']):
            assert np.all(row == i)
Example #12
0
 def to_asdf(self, filename):
     """
     Serialize an `EmissionModel` to an ASDF file.

     The tree records the temperature and density grids, the names of all
     ions in the model, their dataset names, and the emissivity table
     filename.
     """
     ion_names = [ion.ion_name for ion in self]
     dataset_names = [ion._dset_names for ion in self]
     tree = {
         'temperature': self.temperature,
         'density': self.density,
         'ions': ion_names,
         'dset_names': dataset_names,
         'emissivity_table_filename': self.emissivity_table_filename,
     }
     with asdf.AsdfFile(tree) as af:
         af.write_to(filename)
Example #13
0
def assert_model_roundtrip(model, tmpdir, version=None):
    """
    Assert that *model* survives an ASDF write/read round trip without
    losing its essential properties, and return the re-read copy.
    """
    target = str(tmpdir / "test.asdf")

    with asdf.AsdfFile({"model": model}, version=version) as af:
        af.write_to(target)

    with asdf.open(target) as af:
        restored = af["model"]
        assert_models_equal(model, restored)
        return restored
Example #14
0
def test_no_blocks(tmp_path, create_editor, version):
    """The ``edit`` CLI command can rewrite the YAML of a file that
    contains no binary blocks."""
    target = str(tmp_path / "test.asdf")

    with asdf.AsdfFile(version=version) as handle:
        handle["foo"] = "bar"
        handle.write_to(target)

    editor = create_editor(r"foo: bar", "foo: baz")
    os.environ["EDITOR"] = editor

    assert main.main_from_args(["edit", target]) == 0

    with asdf.open(target) as handle:
        assert handle["foo"] == "baz"
Exemple #15
0
def test_copy_inline():
    """An inline array referenced twice from a new tree writes out
    without error."""
    yaml = """
x0: !core/ndarray-1.0.0
  data: [-1.0, 1.0]
    """

    source = helpers.yaml_to_asdf(yaml)

    with asdf.open(source) as infile:
        with asdf.AsdfFile() as outfile:
            outfile.tree['a'] = infile.tree['x0']
            outfile.tree['b'] = outfile.tree['a']
            outfile.write_to(io.BytesIO())
Example #16
0
 def to_asdf(self, filename):
     """
     Serialize this instance of `Skeleton` to an ASDF file.

     Each loop becomes one top-level tree entry, keyed by loop name.
     """
     tree = {
         loop.name: {
             'field_strength': loop.field_strength,
             'coordinate': loop.coordinate,
             'model_results_filename': loop.model_results_filename,
         }
         for loop in self.loops
     }
     with asdf.AsdfFile(tree) as af:
         af.write_to(filename)
Example #17
0
def test_is_asdf(tmpdir):
    """is_asdf_file rejects a FITS file on disk but accepts an AsdfFile
    instance."""
    # test fits
    fits = pytest.importorskip('astropy.io.fits')

    hdus = fits.HDUList()
    hdus.append(fits.PrimaryHDU())
    hdus.append(fits.ImageHDU(data=np.arange(24).reshape((4, 6))))
    fits_path = os.path.join(str(tmpdir), 'test.fits')
    hdus.writeto(fits_path)

    assert not is_asdf_file(fits_path)
    assert is_asdf_file(asdf.AsdfFile())
Example #18
0
 def test_empty_split_save_load_merge(self):
     """Merging an empty list of strings yields empty arrays and a None
     dtype marker, and the merged form survives an ASDF round trip."""
     strings = []
     merged = merge_strings(strings)
     assert_array_equal(merged["strings"], numpy.array([], dtype="S1"))
     assert_array_equal(merged["lengths"], numpy.array([], dtype=int))
     self.assertIsNone(merged["str"])
     af = asdf.AsdfFile(merged)
     buffer = BytesIO()
     af.write_to(buffer)
     buffer.seek(0)
     # Use a context manager so the loaded AsdfFile is always closed;
     # the original left it open (resource leak).
     with asdf.open(buffer) as af_loaded:
         strings_restored = split_strings(af_loaded.tree)
         self.assertEqual(strings, strings_restored)
Example #19
0
def test_no_block_index():
    """Writing with include_block_index=False leaves the block index out
    of the output."""
    out = io.BytesIO()

    tree = {'arrays': [np.ones((8, 8)) * i for i in range(10)]}

    asdf.AsdfFile(tree).write_to(out, include_block_index=False)

    assert constants.INDEX_HEADER not in out.getvalue()
Example #20
0
def test_recompression(tmpdir):
    """An array compressed with zlib can be rewritten with bzp2 and still
    match the original tree."""
    tree = _get_large_tree()

    first_path = os.path.join(str(tmpdir), 'test1.asdf')
    af = asdf.AsdfFile(tree)
    af.write_to(first_path, all_array_compression='zlib')
    af.close()

    af = asdf.AsdfFile.open(first_path)
    second_path = os.path.join(str(tmpdir), 'test2.asdf')
    af.write_to(second_path, all_array_compression='bzp2')
    af.close()

    af = asdf.AsdfFile.open(second_path)
    helpers.assert_tree_match(tree, af.tree)
    af.close()
Example #21
0
def test_update_expand_tree(tmpdir):
    """update() relocates blocks when the grown tree no longer fits in the
    padded header space, and leaves them in place when it still fits.

    Covers two scenarios: a large tree expansion (blocks may move to a
    later offset) and a tiny expansion (offsets must be unchanged). Inline
    storage of the third array must survive both updates.
    """
    tmpdir = str(tmpdir)
    testpath = os.path.join(tmpdir, "test.asdf")

    # This is the case where the new tree can't fit in the available space
    my_array = np.arange(64) * 1
    my_array2 = np.arange(64) * 2
    tree = {'arrays': [my_array, my_array2, np.arange(3)]}

    ff = asdf.AsdfFile(tree)
    ff.set_array_storage(tree['arrays'][2], 'inline')
    assert len(list(ff.blocks.inline_blocks)) == 1
    ff.write_to(testpath, auto_inline=None, pad_blocks=True)
    with asdf.open(testpath, mode='rw') as ff:
        assert_array_equal(ff.tree['arrays'][0], my_array)
        # Remember where the first block sat before the update.
        orig_offset = ff.blocks[ff.tree['arrays'][0]].offset
        # Grow the tree well past the padded header space.
        ff.tree['extra'] = [0] * 6000
        ff.update()

    with asdf.open(testpath) as ff:
        # The block may have moved, but never to an earlier offset.
        assert orig_offset <= ff.blocks[ff.tree['arrays'][0]].offset
        assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline'
        assert_array_equal(ff.tree['arrays'][0], my_array)
        assert_array_equal(ff.tree['arrays'][1], my_array2)

    # Now, we expand the header only by a little bit
    ff = asdf.AsdfFile(tree)
    ff.set_array_storage(tree['arrays'][2], 'inline')
    ff.write_to(os.path.join(tmpdir, "test2.asdf"), pad_blocks=True)
    with asdf.open(os.path.join(tmpdir, "test2.asdf"), mode='rw') as ff:
        orig_offset = ff.blocks[ff.tree['arrays'][0]].offset
        ff.tree['extra'] = [0] * 2
        ff.update()

    with asdf.open(os.path.join(tmpdir, "test2.asdf")) as ff:
        # A small expansion fits in the padding, so blocks stay put.
        assert orig_offset == ff.blocks[ff.tree['arrays'][0]].offset
        assert ff.blocks[ff.tree['arrays'][2]].array_storage == 'inline'
        assert_array_equal(ff.tree['arrays'][0], my_array)
        assert_array_equal(ff.tree['arrays'][1], my_array2)
def test_versioned_writing():
    """A custom type registered under a bogus file-format version is
    written with that version's tag.

    Temporarily injects version '42.0.0' into the global versioning
    tables; cleanup now runs in a ``finally`` so a failing assertion
    cannot leak the bogus version into subsequent tests.
    """
    from ..tags.core.complex import ComplexType

    # Create a bogus version map
    versioning._version_map['42.0.0'] = {
        'FILE_FORMAT': '42.0.0',
        'YAML_VERSION': '1.1',
        'tags': {
            'tag:stsci.edu:asdf/core/complex': '42.0.0',
            'tag:stscu.edu:asdf/core/asdf': '1.0.0'
        }
    }

    versioning.supported_versions.append(versioning.AsdfVersion('42.0.0'))

    try:
        class FancyComplexType(ComplexType, asdftypes.CustomType):
            version = (42, 0, 0)

        # This is a sanity check to ensure that the custom FancyComplexType
        # does not get added to ASDF's built-in extension, since this would
        # cause any subsequent tests that rely on ComplexType to fail.
        assert not issubclass(FancyComplexType, asdftypes.AsdfTypeMeta)

        class FancyComplexExtension(object):
            @property
            def types(self):
                return [FancyComplexType]

            @property
            def tag_mapping(self):
                return []

            @property
            def url_mapping(self):
                return [
                    ('http://stsci.edu/schemas/asdf/core/complex-42.0.0',
                     util.filepath_to_url(TEST_DATA_PATH) + '/complex-42.0.0.yaml')
                ]

        tree = {'a': complex(0, -1)}

        buff = io.BytesIO()
        ff = asdf.AsdfFile(tree,
                           version="42.0.0",
                           extensions=[FancyComplexExtension()])
        ff.write_to(buff)

        assert b'complex-42.0.0' in buff.getvalue()
    finally:
        # Always undo the global monkey-patching, even on failure.
        del versioning._version_map['42.0.0']
        versioning.supported_versions.pop()
Example #23
0
def test_large_literals(use_numpy):
    """Integers of 2**53 and above are rejected as YAML literals — as a
    value, a list element, or a mapping key — but are accepted inside a
    binary-stored ndarray. Forcing that array inline puts the literal
    back into the YAML, which must fail again."""
    largeval = 1 << 53
    if use_numpy:
        largeval = np.uint64(largeval)

    offending_trees = (
        {'large_int': largeval},
        {'large_list': [largeval]},
        {largeval: 'large_key'},
    )
    for bad_tree in offending_trees:
        with pytest.raises(ValidationError):
            asdf.AsdfFile(bad_tree)

    ff = asdf.AsdfFile({'large_array': np.array([largeval], np.uint64)})
    # Binary block storage keeps the literal out of the YAML, so this is
    # fine.
    ff.write_to(io.BytesIO())

    ff.set_array_storage(ff.tree['large_array'], 'inline')
    buff = io.BytesIO()
    with pytest.raises(ValidationError):
        ff.write_to(buff)
        print(buff.getvalue())
Example #24
0
def test_get_history_entries(tmpdir):
    """
    Test edge cases for the get_history_entries API. Other cases tested
    above.
    """
    empty_file = str(tmpdir.join('empty.asdf'))

    with asdf.AsdfFile() as af:
        af.write_to(empty_file)

    # A freshly written file has extension metadata but no user-added
    # history entries, and get_history_entries must cope with that.
    with asdf.open(empty_file) as af:
        assert len(af['history']['extensions']) > 0
        assert len(af.get_history_entries()) == 0
Example #25
0
def test_input(tmpdir):
    """Rewriting a zlib-compressed file with default options preserves
    both the tree data and the per-array compression setting."""
    tree = _get_large_tree()

    first = os.path.join(str(tmpdir), 'test1.asdf')
    handle = asdf.AsdfFile(tree)
    handle.write_to(first, all_array_compression='zlib')
    handle.close()

    handle = asdf.AsdfFile.open(first)
    second = os.path.join(str(tmpdir), 'test2.asdf')
    handle.write_to(second)
    handle.close()

    handle = asdf.AsdfFile.open(second)
    helpers.assert_tree_match(tree, handle.tree)
    assert handle.get_array_compression(handle.tree['science_data']) == 'zlib'
    handle.close()
Example #26
0
def test_pad_blocks(tmpdir):
    """Arrays written with pad_blocks=True read back unchanged."""
    out_path = os.path.join(str(tmpdir), "test.asdf")

    array_a = np.ones((8, 8)) * 1
    array_b = np.ones((42, 5)) * 2

    af = asdf.AsdfFile({'my_array': array_a, 'my_array2': array_b})
    af.write_to(out_path, pad_blocks=True)

    with asdf.open(out_path) as af:
        assert_array_equal(af.tree['my_array'], array_a)
        assert_array_equal(af.tree['my_array2'], array_b)
Example #27
0
def mk_flat(outfilepath):
    """Create a minimal FLAT reference file (20x20 zero-filled data, dq,
    and err planes) and write it to *outfilepath*."""
    meta = {}
    add_ref_common(meta)
    flat_node = stnode.FlatRef()
    meta['reftype'] = 'FLAT'
    flat_node['meta'] = meta

    plane_shape = (20, 20)
    flat_node['data'] = np.zeros(plane_shape, dtype=np.float32)
    flat_node['dq'] = np.zeros(plane_shape, dtype=np.uint32)
    flat_node['err'] = np.zeros(plane_shape, dtype=np.float32)

    out = asdf.AsdfFile()
    out.tree = {'roman': flat_node}
    out.write_to(outfilepath)
Example #28
0
def test_embed_asdf_in_fits_file_anonymous_extensions(tmpdir, dtype):
    """An AsdfInFits file gains an anonymous 'ASDF' extension whose tree
    matches both the original tree and a plain-ASDF rewrite."""
    # Write the AsdfInFits object out as a FITS file with ASDF extension
    asdf_in_fits = create_asdf_in_fits(dtype)
    asdf_in_fits.write_to(os.path.join(str(tmpdir), 'test.fits'))

    plain = asdf.AsdfFile(asdf_in_fits.tree)
    plain.write_to(os.path.join(str(tmpdir), 'plain.asdf'))

    with fits.open(os.path.join(str(tmpdir), 'test.fits')) as hdulist:
        assert len(hdulist) == 4
        assert [hdu.name for hdu in hdulist] == ['PRIMARY', '', '', 'ASDF']
        asdf_hdu = hdulist['ASDF']
        assert isinstance(asdf_hdu, fits.BinTableHDU)
        assert asdf_hdu.data.tobytes().startswith(b'#ASDF')

        with fits_embed.AsdfInFits.open(hdulist) as embedded:
            assert_tree_match(asdf_in_fits.tree, embedded.tree)

            rewritten = asdf.AsdfFile(copy.deepcopy(embedded.tree))
            rewritten.write_to(os.path.join(str(tmpdir), 'test.asdf'))

    with asdf.open(os.path.join(str(tmpdir), 'test.asdf')) as ff:
        assert_tree_match(asdf_in_fits.tree, ff.tree)
Example #29
0
def test_update_exceptions(tmpdir):
    """update() raises IOError on a read-only/copy-arrays file, succeeds
    on a writable in-memory file, and raises ValueError when the AsdfFile
    has no backing store."""
    target = os.path.join(str(tmpdir), 'test.asdf')

    tree = {'my_array': np.random.rand(8, 8)}
    asdf.AsdfFile(tree).write_to(target)

    with asdf.open(target, mode='r', copy_arrays=True) as af:
        with pytest.raises(IOError):
            af.update()

    buff = io.BytesIO()
    asdf.AsdfFile(tree).write_to(buff)
    buff.seek(0)

    with asdf.open(buff, mode='rw') as af:
        af.update()

    with pytest.raises(ValueError):
        asdf.AsdfFile().update()
Example #30
0
def test_extension_metadata(tmpdir):
    """A freshly written file records exactly one extension history entry,
    describing asdf's builtin extension."""
    out = str(tmpdir.join('extension.asdf'))
    asdf.AsdfFile().write_to(out)

    with asdf.open(out) as af:
        extensions = af.tree['history']['extensions']
        assert len(extensions) == 1
        entry = extensions[0]
        assert entry.extension_class == 'asdf.extension.BuiltinExtension'
        # The version is intentionally unchecked here: it is
        # auto-generated at build time and varies between builds.
        assert entry.software['name'] == 'asdf'