Example #1
def default_lapl():
    """ Default link access property list """
    lapl = h5p.create(h5p.LINK_ACCESS)
    fapl = h5p.create(h5p.FILE_ACCESS)
    fapl.set_fclose_degree(h5f.CLOSE_STRONG)
    lapl.set_elink_fapl(fapl)
    return lapl
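A hedged usage sketch of the helper above: the LAPL can be handed to low-level open calls so that files reached through external links inherit the CLOSE_STRONG access settings. The file and link names here are illustrative, not taken from the source project.

from h5py import h5f, h5o

lapl = default_lapl()
fid = h5f.open(b"parent.h5", h5f.ACC_RDONLY)  # hypothetical file
# "linked" is assumed to be an external link in the root group
obj = h5o.open(fid, b"linked", lapl=lapl)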
Example #2
    def test_obj_track_times(self):
        """
        tests the object track times set/get
        """
        # test for groups
        gcid = h5p.create(h5p.GROUP_CREATE)
        gcid.set_obj_track_times(False)
        self.assertEqual(False, gcid.get_obj_track_times())

        gcid.set_obj_track_times(True)
        self.assertEqual(True, gcid.get_obj_track_times())
        # test for datasets
        dcid = h5p.create(h5p.DATASET_CREATE)
        dcid.set_obj_track_times(False)
        self.assertEqual(False, dcid.get_obj_track_times())

        dcid.set_obj_track_times(True)
        self.assertEqual(True, dcid.get_obj_track_times())

        # test for generic objects
        ocid = h5p.create(h5p.OBJECT_CREATE)
        ocid.set_obj_track_times(False)
        self.assertEqual(False, ocid.get_obj_track_times())

        ocid.set_obj_track_times(True)
        self.assertEqual(True, ocid.get_obj_track_times())
Example #3
def make_fapl(driver, libver, **kwds):
    """ Set up a file access property list """
    plist = h5p.create(h5p.FILE_ACCESS)
    plist.set_fclose_degree(h5f.CLOSE_STRONG)

    if libver is not None:
        if libver in libver_dict:
            low = libver_dict[libver]
            high = h5f.LIBVER_LATEST
        else:
            low, high = (libver_dict[x] for x in libver)
        plist.set_libver_bounds(low, high)

    if driver is None or (driver == 'windows' and sys.platform == 'win32'):
        return plist

    if driver == 'sec2':
        plist.set_fapl_sec2(**kwds)
    elif driver == 'stdio':
        plist.set_fapl_stdio(**kwds)
    elif driver == 'core':
        plist.set_fapl_core(**kwds)
    elif driver == 'family':
        plist.set_fapl_family(memb_fapl=plist.copy(), **kwds)
    else:
        raise ValueError('Unknown driver type "%s"' % driver)

    return plist
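A minimal usage sketch, assuming libver_dict maps "earliest"/"latest" to the matching h5f.LIBVER_* constants as in h5py's own files module; the file name is illustrative:

fapl = make_fapl("core", "latest", backing_store=False)  # in-memory file
fid = h5f.create(b"scratch.h5", h5f.ACC_TRUNC, fapl=fapl)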
Example #4
def create_entry(obj, name, timestamp, **attributes):
    """Create a new ARF entry under obj, setting required attributes.

    An entry is an abstract collection of data which all refer to the same time
    frame. Data can include physiological recordings, sound recordings, and
    derived data such as spike times and labels. See add_data() for information
    on how data are stored.

    name -- the name of the new entry. Any valid Python string.

    timestamp -- timestamp of entry (datetime object, or seconds since
               January 1, 1970). Can be an integer, a float, or a tuple
               of integers (seconds, microseconds)

    Additional keyword arguments are set as attributes on created entry.

    Returns: newly created entry object

    """
    # create group using low-level interface to store creation order
    from h5py import h5p, h5g
    from h5py._hl import group
    try:
        gcpl = h5p.create(h5p.GROUP_CREATE)
        gcpl.set_link_creation_order(h5p.CRT_ORDER_TRACKED
                                     | h5p.CRT_ORDER_INDEXED)
    except AttributeError:
        grp = obj.create_group(name)
    else:
        grp = group.Group(h5g.create(obj.id, name, lcpl=None, gcpl=gcpl))
    set_uuid(grp, attributes.pop("uuid", None))
    set_attributes(grp, timestamp=convert_timestamp(timestamp), **attributes)
    return grp
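A hypothetical call, assuming an open ARF file handle f; the entry name and extra attribute are illustrative:

from datetime import datetime

entry = create_entry(f, "entry_00001", datetime.now(),
                     experimenter="smith")  # extra kwargs become attributes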
Example #5
def make_fapl(driver, libver, **kwds):
    """ Set up a file access property list """
    plist = h5p.create(h5p.FILE_ACCESS)
    plist.set_fclose_degree(h5f.CLOSE_STRONG)

    if libver is not None:
        if libver in libver_dict:
            low = libver_dict[libver]
            high = h5f.LIBVER_LATEST
        else:
            low, high = (libver_dict[x] for x in libver)
        plist.set_libver_bounds(low, high)

    if driver is None or (driver == "windows" and sys.platform == "win32"):
        return plist

    if driver == "sec2":
        plist.set_fapl_sec2(**kwds)
    elif driver == "stdio":
        plist.set_fapl_stdio(**kwds)
    elif driver == "core":
        plist.set_fapl_core(**kwds)
    elif driver == "family":
        plist.set_fapl_family(memb_fapl=plist.copy(), **kwds)
    elif driver == "mpio":
        kwds.setdefault("info", mpi4py.MPI.Info())
        plist.set_fapl_mpio(**kwds)
    else:
        raise ValueError('Unknown driver type "%s"' % driver)

    return plist
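A sketch of requesting the MPI-IO driver through this helper: set_fapl_mpio() takes the communicator plus the MPI.Info object that the code above defaults for you (assumes mpi4py is installed):

from mpi4py import MPI

fapl = make_fapl("mpio", "latest", comm=MPI.COMM_WORLD)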
Example #6
    def ensureHdf(file, mode):
        if isinstance(file, str) or file is None:
            driver, driverKwds = None, {}

            if file is None:
                file = tempfile.mktemp(suffix=".hdf")
                driver, driverKwds = "core", {"backing_store": False}

            dirname = os.path.dirname(os.path.abspath(file))
            if not os.path.exists(dirname):
                os.makedirs(dirname)

            return h5py.File(file,
                             mode,
                             libver="earliest",
                             driver=driver,
                             **driverKwds)

        elif isinstance(file, bytes):
            fapl = h5p.create(h5p.FILE_ACCESS)
            fapl.set_fapl_core()
            fapl.set_file_image(file)

            fid = h5f.open(tempfile.mktemp(suffix=".hdf").encode(),
                           h5f.ACC_RDONLY,
                           fapl=fapl)
            return h5py.File(fid)

        else:
            return file
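Illustrative calls (paths are hypothetical): a string path is opened or created on disk, while a bytes object is mounted read-only from memory via the core driver and a file image:

h = ensureHdf("results/run1.hdf", "a")
h_img = ensureHdf(open("run1.hdf", "rb").read(), "r")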
Example #7
def open_file(name, mode=None, driver=None, libver=None, userblock_size=None, **kwargs):
    """Open an ARF file, creating as necessary.

    Use this instead of h5py.File to ensure that root-level attributes and group
    creation property lists are set correctly.

    """
    import os
    from h5py import h5p
    from h5py._hl import files

    exists = os.path.exists(name)
    try:
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_link_creation_order(h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED)
    except AttributeError:
        # older version of h5py
        fp = files.File(name, mode=mode, driver=driver, libver=libver, **kwargs)
    else:
        fapl = files.make_fapl(driver, libver, **kwargs)
        fp = files.File(files.make_fid(name, mode, userblock_size, fapl, fcpl))

    if not exists and fp.mode == 'r+':
        set_attributes(fp,
                       arf_library='python',
                       arf_library_version=__version__,
                       arf_version=spec_version)
    return fp
Example #8
def create_entry(group, name, timestamp, **attributes):
    """Create a new ARF entry under group, setting required attributes.

    An entry is an abstract collection of data which all refer to the same time
    frame. Data can include physiological recordings, sound recordings, and
    derived data such as spike times and labels. See add_data() for information
    on how data are stored.

    name -- the name of the new entry. Any valid Python string.

    timestamp -- timestamp of entry (datetime object, or seconds since
               January 1, 1970). Can be an integer, a float, or a tuple
               of integers (seconds, microseconds)

    Additional keyword arguments are set as attributes on created entry.

    Returns: newly created entry object

    """
    # create group using low-level interface to store creation order
    from h5py import h5p, h5g, _hl
    try:
        gcpl = h5p.create(h5p.GROUP_CREATE)
        gcpl.set_link_creation_order(
            h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED)
    except AttributeError:
        grp = group.create_group(name)
    else:
        name, lcpl = group._e(name, lcpl=True)
        grp = _hl.group.Group(h5g.create(group.id, name, lcpl=lcpl, gcpl=gcpl))
    set_uuid(grp, attributes.pop("uuid", None))
    set_attributes(grp,
                   timestamp=convert_timestamp(timestamp),
                   **attributes)
    return grp
Example #9
def make_fapl(driver, libver, **kwds):
    """ Set up a file access property list """
    plist = h5p.create(h5p.FILE_ACCESS)

    if libver is not None:
        if libver in libver_dict:
            low = libver_dict[libver]
            high = h5f.LIBVER_LATEST
        else:
            low, high = (libver_dict[x] for x in libver)
        plist.set_libver_bounds(low, high)

    if driver is None or (driver == 'windows' and sys.platform == 'win32'):
        return plist

    if driver == 'sec2':
        plist.set_fapl_sec2(**kwds)
    elif driver == 'stdio':
        plist.set_fapl_stdio(**kwds)
    elif driver == 'core':
        plist.set_fapl_core(**kwds)
    elif driver == 'family':
        plist.set_fapl_family(memb_fapl=plist.copy(), **kwds)
    elif driver == 'mpio':
        kwds.setdefault('info', mpi4py.MPI.Info())
        plist.set_fapl_mpio(**kwds)
    else:
        raise ValueError('Unknown driver type "%s"' % driver)

    return plist
Example #10
    def test_set_alignment(self):
        '''test get/set alignment'''
        falist = h5p.create(h5p.FILE_ACCESS)
        threshold = 10 * 1024  # threshold of 10 KiB
        alignment = 1024 * 1024  # alignment of 1 MiB

        falist.set_alignment(threshold, alignment)
        self.assertEqual((threshold, alignment), falist.get_alignment())
Example #11
    def test_virtual_prefix(self):
        '''test get/set virtual prefix '''
        dalist = h5p.create(h5p.DATASET_ACCESS)
        self.assertEqual(dalist.get_virtual_prefix().decode(), '')

        virtual_prefix = "path/to/virtual/dataset"
        dalist.set_virtual_prefix(virtual_prefix.encode('utf-8'))
        self.assertEqual(dalist.get_virtual_prefix().decode(), virtual_prefix)
Example #12
    def test_link_creation_tracking(self):
        """
        tests the link creation order set/get
        """

        gcid = h5p.create(h5p.GROUP_CREATE)
        gcid.set_link_creation_order(0)
        self.assertEqual(0, gcid.get_link_creation_order())

        flags = h5p.CRT_ORDER_TRACKED|h5p.CRT_ORDER_INDEXED
        gcid.set_link_creation_order(flags)
        self.assertEqual(flags, gcid.get_link_creation_order())

        # test for file creation
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_link_creation_order(flags)
        self.assertEqual(flags, fcpl.get_link_creation_order())
Example #13
    def test_link_creation_tracking(self):
        """
        tests the link creation order set/get
        """

        gcid = h5p.create(h5p.GROUP_CREATE)
        gcid.set_link_creation_order(0)
        self.assertEqual(0, gcid.get_link_creation_order())

        flags = h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED
        gcid.set_link_creation_order(flags)
        self.assertEqual(flags, gcid.get_link_creation_order())

        # test for file creation
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_link_creation_order(flags)
        self.assertEqual(flags, fcpl.get_link_creation_order())
Example #14
def make_fid(name, mode, userblock_size, fapl, fcpl=None):
    """ Get a new FileID by opening or creating a file.
    Also validates mode argument."""

    if userblock_size is not None:
        if mode in ("r", "r+"):
            raise ValueError("User block may only be specified " "when creating a file")
        try:
            userblock_size = int(userblock_size)
        except (TypeError, ValueError):
            raise ValueError("User block size must be an integer")
        if fcpl is None:
            fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_userblock(userblock_size)

    if mode == "r":
        fid = h5f.open(name, h5f.ACC_RDONLY, fapl=fapl)
    elif mode == "r+":
        fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
    elif mode == "w-":
        fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    elif mode == "w":
        fid = h5f.create(name, h5f.ACC_TRUNC, fapl=fapl, fcpl=fcpl)
    elif mode == "a":
        # Open in append mode (read/write).
        # If that fails, create a new file only if it won't clobber an
        # existing one (ACC_EXCL)
        try:
            fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
        except IOError:
            fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    elif mode is None:
        # Try to open in append mode (read/write).
        # If that fails, try readonly, and finally create a new file only
        # if it won't clobber an existing file (ACC_EXCL).
        try:
            fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
        except IOError:
            try:
                fid = h5f.open(name, h5f.ACC_RDONLY, fapl=fapl)
            except IOError:
                fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    else:
        raise ValueError("Invalid mode; must be one of r, r+, w, w-, a")

    try:
        if userblock_size is not None:
            existing_fcpl = fid.get_create_plist()
            if existing_fcpl.get_userblock() != userblock_size:
                raise ValueError(
                    "Requested userblock size (%d) does not match that of existing file (%d)"
                    % (userblock_size, existing_fcpl.get_userblock())
                )
    except:
        fid.close()
        raise

    return fid
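A short sketch tying make_fapl() and make_fid() together; the file name is illustrative, and the returned FileID can be wrapped in a high-level h5py.File:

fapl = make_fapl(None, None)
fid = make_fid(b"data.h5", "a", None, fapl)
f = h5py.File(fid)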
Example #15
def make_fid(name, mode, userblock_size, fapl, fcpl=None):
    """ Get a new FileID by opening or creating a file.
    Also validates mode argument."""

    if userblock_size is not None:
        if mode in ('r', 'r+'):
            raise ValueError("User block may only be specified "
                             "when creating a file")
        try:
            userblock_size = int(userblock_size)
        except (TypeError, ValueError):
            raise ValueError("User block size must be an integer")
        if fcpl is None:
            fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_userblock(userblock_size)

    if mode == 'r':
        fid = h5f.open(name, h5f.ACC_RDONLY, fapl=fapl)
    elif mode == 'r+':
        fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
    elif mode == 'w-':
        fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    elif mode == 'w':
        fid = h5f.create(name, h5f.ACC_TRUNC, fapl=fapl, fcpl=fcpl)
    elif mode == 'a':
        # Open in append mode (read/write).
        # If that fails, create a new file only if it won't clobber an
        # existing one (ACC_EXCL)
        try:
            fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
        except IOError:
            fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    elif mode is None:
        # Try to open in append mode (read/write).
        # If that fails, try readonly, and finally create a new file only
        # if it won't clobber an existing file (ACC_EXCL).
        try:
            fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
        except IOError:
            try:
                fid = h5f.open(name, h5f.ACC_RDONLY, fapl=fapl)
            except IOError:
                fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    else:
        raise ValueError("Invalid mode; must be one of r, r+, w, w-, a")

    try:
        if userblock_size is not None:
            existing_fcpl = fid.get_create_plist()
            if existing_fcpl.get_userblock() != userblock_size:
                raise ValueError(
                    "Requested userblock size (%d) does not match that of existing file (%d)"
                    % (userblock_size, existing_fcpl.get_userblock()))
    except:
        fid.close()
        raise

    return fid
Example #16
    def test_chunk_cache(self):
        '''test get/set chunk cache '''
        dalist = h5p.create(h5p.DATASET_ACCESS)
        nslots = 10000  # 40kb hash table
        nbytes = 1000000  # 1MB cache size
        w0 = .5  # even blend of eviction strategy

        dalist.set_chunk_cache(nslots, nbytes, w0)
        self.assertEqual((nslots, nbytes, w0), dalist.get_chunk_cache())
Example #17
    def test_set_file_locking(self):
        '''test get/set file locking'''
        falist = h5p.create(h5p.FILE_ACCESS)
        use_file_locking = False
        ignore_when_disabled = False

        falist.set_file_locking(use_file_locking, ignore_when_disabled)
        self.assertEqual((use_file_locking, ignore_when_disabled),
                         falist.get_file_locking())
Example #18
    def test_efile_prefix(self):
        '''test get/set efile prefix '''
        dalist = h5p.create(h5p.DATASET_ACCESS)
        self.assertEqual(dalist.get_efile_prefix().decode(), '')

        efile_prefix = "path/to/external/dataset"
        dalist.set_efile_prefix(efile_prefix.encode('utf-8'))
        self.assertEqual(dalist.get_efile_prefix().decode(), efile_prefix)

        efile_prefix = "${ORIGIN}"
        dalist.set_efile_prefix(efile_prefix.encode('utf-8'))
        self.assertEqual(dalist.get_efile_prefix().decode(), efile_prefix)
Example #19
def test_file(tmp_path):
    image = decompress(a2b_base64(compressed_image))

    # FAPL: File access property list
    fapl = h5p.create(h5py.h5p.FILE_ACCESS)
    fapl.set_fapl_core()
    fapl.set_file_image(image)

    file = tmp_path / "disk.h5"
    fid = h5f.open(bytes(file), h5py.h5f.ACC_RDONLY, fapl=fapl)
    with h5py.File(fid) as f:
        assert (f["test"][:] == [1, 2, 3]).all()
        assert f["test"].dtype == "int64"
Example #20
    def test_load_from_image(self):
        from binascii import a2b_base64
        from zlib import decompress

        compressed_image = 'eJzr9HBx4+WS4mIAAQ4OBhYGAQZk8B8KKjhQ+TD5BCjNCKU7oPQKJpg4I1hOAiouCDUfXV1IkKsrSPV/NACzx4AFQnMwjIKRCDxcHQNAdASUD0ulJ5hQ1ZWkFpeAaFh69KDQXkYGNohZjDA+JCUzMkIEmKHqELQAWKkAByytOoBJViAPJM7ExATWyAE0B8RgZkyAJmlYDoEAIahukJoNU6+HMTA0UOgT6oBgP38XUI6G5UMFZrzKR8EoGAUjGMDKYVgxDSsuAHcfMK8='

        image = decompress(a2b_base64(compressed_image))

        fapl = h5p.create(h5py.h5p.FILE_ACCESS)
        fapl.set_fapl_core()
        fapl.set_file_image(image)

        fid = h5f.open(self.mktemp().encode(), h5py.h5f.ACC_RDONLY, fapl=fapl)
        f = h5py.File(fid)

        self.assertTrue('test' in f)
Example #21
def open_file(name,
              mode=None,
              driver=None,
              libver=None,
              userblock_size=None,
              **kwargs):
    """Open an ARF file, creating as necessary.

    Use this instead of h5py.File to ensure that root-level attributes and group
    creation property lists are set correctly.

    """
    import sys
    import os
    from h5py import h5p
    from h5py._hl import files

    try:
        # If the byte string doesn't match the default
        # encoding, just pass it on as-is.  Note Unicode
        # objects can always be encoded.
        name = name.encode(sys.getfilesystemencoding())
    except (UnicodeError, LookupError):
        pass
    exists = os.path.exists(name)
    try:
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_link_creation_order(h5p.CRT_ORDER_TRACKED
                                     | h5p.CRT_ORDER_INDEXED)
    except AttributeError:
        # older version of h5py
        fp = files.File(name,
                        mode=mode,
                        driver=driver,
                        libver=libver,
                        **kwargs)
    else:
        fapl = files.make_fapl(driver, libver, **kwargs)
        fp = files.File(files.make_fid(name, mode, userblock_size, fapl, fcpl))

    if not exists and fp.mode == 'r+':
        set_attributes(fp,
                       arf_library='python',
                       arf_library_version=__version__,
                       arf_version=spec_version)
    return fp
Example #22
    def test_attr_phase_change(self):
        """
        test the attribute phase change
        """

        cid = h5p.create(h5p.OBJECT_CREATE)
        # test default value
        ret = cid.get_attr_phase_change()
        self.assertEqual((8, 6), ret)

        # max_compact must < 65536 (64kb)
        with self.assertRaises(ValueError):
            cid.set_attr_phase_change(65536, 6)

        # Using dense attributes storage to avoid 64kb size limitation
        # for a single attribute in compact attribute storage.
        cid.set_attr_phase_change(0, 0)
        self.assertEqual((0, 0), cid.get_attr_phase_change())
Example #23
def _write_dset_low(dset, data, arr_slice, collective=False):
    memory_space = h5s.create_simple(data.shape)
    file_space = dset.id.get_space()

    s = (arr_slice[0].start, arr_slice[1].start, arr_slice[2].start)
    e = (arr_slice[0].stop, arr_slice[1].stop, arr_slice[2].stop)

    count = tuple(ee - ss for ss, ee in zip(s, e))

    file_space.select_hyperslab(s, count)

    if collective:
        dxpl = h5p.create(h5p.DATASET_XFER)
        dxpl.set_dxpl_mpio(h5fd.MPIO_COLLECTIVE)
    else:
        dxpl = None

    dset.id.write(memory_space, file_space,
                  np.ascontiguousarray(data), dxpl=dxpl)
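Hedged usage note: collective transfers only make sense when the file was opened with the "mpio" driver. A hypothetical call writing a 4x4x4 block at the origin:

_write_dset_low(dset, data,
                (slice(0, 4), slice(0, 4), slice(0, 4)),
                collective=True)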
Example #24
def make_fid(name, mode, userblock_size, fapl):
    """ Get a new FileID by opening or creating a file.
    Also validates mode argument."""

    fcpl = None
    if userblock_size is not None:
        if mode in ('r', 'r+'):
            raise ValueError(
                "User block may only be specified when creating a file")
        try:
            userblock_size = int(userblock_size)
        except (TypeError, ValueError):
            raise ValueError("User block size must be an integer")
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_userblock(userblock_size)

    if mode == 'r':
        fid = h5f.open(name, h5f.ACC_RDONLY, fapl=fapl)
    elif mode == 'r+':
        fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
    elif mode == 'w-':
        fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    elif mode == 'w':
        fid = h5f.create(name, h5f.ACC_TRUNC, fapl=fapl, fcpl=fcpl)
    elif mode == 'a' or mode is None:
        try:
            fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
            try:
                existing_fcpl = fid.get_create_plist()
                if userblock_size is not None and existing_fcpl.get_userblock(
                ) != userblock_size:
                    raise ValueError(
                        "Requested userblock size (%d) does not match that of existing file (%d)"
                        % (userblock_size, existing_fcpl.get_userblock()))
            except:
                fid.close()
                raise
        except IOError:
            fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    else:
        raise ValueError("Invalid mode; must be one of r, r+, w, w-, a")

    return fid
Example #25
def make_fid(name, mode, userblock_size, fapl):
    """ Get a new FileID by opening or creating a file.
    Also validates mode argument."""

    fcpl = None
    if userblock_size is not None:
        if mode in ('r', 'r+'):
            raise ValueError("User block may only be specified "
                             "when creating a file")
        try:
            userblock_size = int(userblock_size)
        except (TypeError, ValueError):
            raise ValueError("User block size must be an integer")
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_userblock(userblock_size)

    if mode == 'r':
        fid = h5f.open(name, h5f.ACC_RDONLY, fapl=fapl)
    elif mode == 'r+':
        fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
    elif mode == 'w-':
        fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    elif mode == 'w':
        fid = h5f.create(name, h5f.ACC_TRUNC, fapl=fapl, fcpl=fcpl)
    elif mode == 'a' or mode is None:
        try:
            fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
            try:
                existing_fcpl = fid.get_create_plist()
                if (userblock_size is not None and
                        existing_fcpl.get_userblock() != userblock_size):
                    raise ValueError("Requested userblock size (%d) does not match that of existing file (%d)" % (userblock_size, existing_fcpl.get_userblock()))
            except:
                fid.close()
                raise
        except IOError:
            fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
    else:
        raise ValueError("Invalid mode; must be one of r, r+, w, w-, a")

    return fid
Example #26
def main():
    sec2j.init()
    fapl = h5p.create(h5p.FILE_ACCESS)
    print(fapl)
    print('fapl.id:', fapl.id)
    sec2j.set_fapl(fapl.id)
    fname = "test.h5"
    if os.path.exists(fname):
        print('Opening the file...')
        fid = h5f.open(fname, fapl=fapl)
    else:
        fid = h5f.create(fname, flags=h5f.ACC_TRUNC, fapl=fapl)
    print('fid ready:', fid.id)
    #return
    f = h5py.File(fid)
    sec2j.tx_start(fid.id)
    g = f.require_group('bbic/volume/0')
    for i in range(10000):
        g.attrs.create('a%d' % i, 640)
    f.flush()
    os._exit(-1)
Example #27
def open_file(name,
              mode=None,
              driver=None,
              libver=None,
              userblock_size=None,
              **kwargs):
    """Open an ARF file, creating as necessary.

    Use this instead of h5py.File to ensure that root-level attributes and group
    creation property lists are set correctly.

    """
    import os
    from h5py import h5p
    from h5py._hl import files

    exists = os.path.exists(name)
    try:
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_link_creation_order(h5p.CRT_ORDER_TRACKED
                                     | h5p.CRT_ORDER_INDEXED)
    except AttributeError:
        # older version of h5py
        fp = files.File(name,
                        mode=mode,
                        driver=driver,
                        libver=libver,
                        **kwargs)
    else:
        fapl = files.make_fapl(driver, libver, **kwargs)
        fp = files.File(files.make_fid(name, mode, userblock_size, fapl, fcpl))

    if not exists and fp.mode == 'r+':
        set_attributes(fp,
                       arf_library='python',
                       arf_library_version=__version__,
                       arf_version=spec_version)
    return fp
Example #28
def open_file(name, mode=None, driver=None, libver=None, userblock_size=None, **kwargs):
    """Open an ARF file, creating as necessary.

    Use this instead of h5py.File to ensure that root-level attributes and group
    creation property lists are set correctly.

    """
    import sys
    import os
    from h5py import h5p
    from h5py._hl import files

    try:
        # If the byte string doesn't match the default
        # encoding, just pass it on as-is.  Note Unicode
        # objects can always be encoded.
        name = name.encode(sys.getfilesystemencoding())
    except (UnicodeError, LookupError):
        pass
    exists = os.path.exists(name)
    try:
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_link_creation_order(
            h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED)
    except AttributeError:
        # older version of h5py
        fp = files.File(name, mode=mode, driver=driver,
                        libver=libver, **kwargs)
    else:
        fapl = files.make_fapl(driver, libver, **kwargs)
        fp = files.File(files.make_fid(name, mode, userblock_size, fapl, fcpl))

    if not exists and fp.mode == 'r+':
        set_attributes(fp,
                       arf_library='python',
                       arf_library_version=__version__,
                       arf_version=spec_version)
    return fp
Example #29
    def copy(self, source, dest, name=None,
             shallow=False, expand_soft=False, expand_external=False,
             expand_refs=False, without_attrs=False):
        """Copy an object or group.

        The source can be a path, Group, Dataset, or Datatype object.  The
        destination can be either a path or a Group object.  The source and
        destinations need not be in the same file.

        If the source is a Group object, all objects contained in that group
        will be copied recursively.

        When the destination is a Group object, by default the target will
        be created in that group with its current name (basename of obj.name).
        You can override that by setting "name" to a string.

        There are various options which all default to "False":

         - shallow: copy only immediate members of a group.

         - expand_soft: expand soft links into new objects.

         - expand_external: expand external links into new objects.

         - expand_refs: copy objects that are pointed to by references.

         - without_attrs: copy object without copying attributes.

       Example:

        >>> f = File('myfile.hdf5')
        >>> f.listnames()
        ['MyGroup']
        >>> f.copy('MyGroup', 'MyCopy')
        >>> f.listnames()
        ['MyGroup', 'MyCopy']

        """
        if isinstance(source, HLObject):
            source_path = '.'
        else:
            # Interpret source as a path relative to this group
            source_path = source
            source = self

        if isinstance(dest, Group):
            if name is not None:
                dest_path = name
            else:
                # copy source into dest group: dest_name/source_name
                dest_path = pp.basename(h5i.get_name(source[source_path].id))

        elif isinstance(dest, HLObject):
            raise TypeError("Destination must be path or Group object")
        else:
            # Interpret destination as a path relative to this group
            dest_path = dest
            dest = self

        flags = 0
        if shallow:
            flags |= h5o.COPY_SHALLOW_HIERARCHY_FLAG
        if expand_soft:
            flags |= h5o.COPY_EXPAND_SOFT_LINK_FLAG
        if expand_external:
            flags |= h5o.COPY_EXPAND_EXT_LINK_FLAG
        if expand_refs:
            flags |= h5o.COPY_EXPAND_REFERENCE_FLAG
        if without_attrs:
            flags |= h5o.COPY_WITHOUT_ATTR_FLAG
        if flags:
            copypl = h5p.create(h5p.OBJECT_COPY)
            copypl.set_copy_object(flags)
        else:
            copypl = None

        h5o.copy(source.id, self._e(source_path), dest.id, self._e(dest_path),
                 copypl, base.dlcpl)
Example #30
def generate_dcpl(shape, dtype, chunks, compression, compression_opts,
                  shuffle, fletcher32, maxshape, scaleoffset):
    """ Generate a dataset creation property list.

    Undocumented and subject to change without warning.
    """

    if shape == ():
        if any((chunks, compression, compression_opts, shuffle, fletcher32,
                scaleoffset is not None)):
            raise TypeError("Scalar datasets don't support chunk/filter options")
        if maxshape and maxshape != ():
            raise TypeError("Scalar datasets cannot be extended")
        return h5p.create(h5p.DATASET_CREATE)

    def rq_tuple(tpl, name):
        """ Check if chunks/maxshape match dataset rank """
        if tpl in (None, True):
            return
        try:
            tpl = tuple(tpl)
        except TypeError:
            raise TypeError('"%s" argument must be None or a sequence object' % name)
        if len(tpl) != len(shape):
            raise ValueError('"%s" must have same rank as dataset shape' % name)

    rq_tuple(chunks, 'chunks')
    rq_tuple(maxshape, 'maxshape')

    if compression is not None:

        if compression not in encode:
            raise ValueError('Compression filter "%s" is unavailable' % compression)

        if compression == 'gzip':
            if compression_opts is None:
                gzip_level = DEFAULT_GZIP
            elif compression_opts in range(10):
                gzip_level = compression_opts
            else:
                raise ValueError("GZIP setting must be an integer from 0-9, not %r" % compression_opts)

        elif compression == 'lzf':
            if compression_opts is not None:
                raise ValueError("LZF compression filter accepts no options")

        elif compression == 'szip':
            if compression_opts is None:
                compression_opts = DEFAULT_SZIP

            err = "SZIP options must be a 2-tuple ('ec'|'nn', even integer 0-32)"
            try:
                szmethod, szpix = compression_opts
            except TypeError:
                raise TypeError(err)
            if szmethod not in ('ec', 'nn'):
                raise ValueError(err)
            if not (0 < szpix <= 32 and szpix % 2 == 0):
                raise ValueError(err)

    elif compression_opts is not None:
        # Can't specify just compression_opts by itself.
        raise TypeError("Compression method must be specified")
    
    if scaleoffset is not None:
        # scaleoffset must be an integer when it is not None or False,
        # except for integral data, for which scaleoffset == True is
        # permissible (will use SO_INT_MINBITS_DEFAULT)
        
        if scaleoffset < 0:
            raise ValueError('scale factor must be >= 0')
                
        if dtype.kind == 'f':
            if scaleoffset is True:
                raise ValueError('integer scaleoffset must be provided for '
                                 'floating point types')
        elif dtype.kind in ('u', 'i'):
            if scaleoffset is True:
                scaleoffset = h5z.SO_INT_MINBITS_DEFAULT
        else:
            raise TypeError('scale/offset filter only supported for integer '
                            'and floating-point types')
        
        # Scale/offset following fletcher32 in the filter chain will (almost?)
        # always trigger a read error, as most scale/offset settings are
        # lossy. Since fletcher32 must come first (see comment below) we
        # simply prohibit the combination of fletcher32 and scale/offset.
        if fletcher32:
            raise ValueError('fletcher32 cannot be used with potentially lossy'
                             ' scale/offset filter')
    # End argument validation

    if (chunks is True) or \
    (chunks is None and any((shuffle, fletcher32, compression, maxshape, 
                             scaleoffset is not None))):
        chunks = guess_chunk(shape, maxshape, dtype.itemsize)
        
    if maxshape is True:
        maxshape = (None,)*len(shape)

    plist = h5p.create(h5p.DATASET_CREATE)
    if chunks is not None:
        plist.set_chunk(chunks)
        plist.set_fill_time(h5d.FILL_TIME_ALLOC)  # prevent resize glitch

    # MUST be first, to prevent 1.6/1.8 compatibility glitch
    if fletcher32:
        plist.set_fletcher32()

    # scale-offset must come before shuffle and compression
    if scaleoffset is not None:
        if dtype.kind in ('u', 'i'):
            plist.set_scaleoffset(h5z.SO_INT, scaleoffset)
        else: # dtype.kind == 'f'
            plist.set_scaleoffset(h5z.SO_FLOAT_DSCALE, scaleoffset)

    if shuffle:
        plist.set_shuffle()

    if compression == 'gzip':
        plist.set_deflate(gzip_level)
    elif compression == 'lzf':
        plist.set_filter(h5z.FILTER_LZF, h5z.FLAG_OPTIONAL)
    elif compression == 'szip':
        opts = {'ec': h5z.SZIP_EC_OPTION_MASK, 'nn': h5z.SZIP_NN_OPTION_MASK}
        plist.set_szip(opts[szmethod], szpix)

    return plist
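A hedged example call, assuming the module-level encode table and DEFAULT_* constants referenced above; it yields a chunked, shuffled, gzip-compressed DCPL:

import numpy as np

dcpl = generate_dcpl(shape=(1000, 1000), dtype=np.dtype("f4"),
                     chunks=True, compression="gzip", compression_opts=4,
                     shuffle=True, fletcher32=False, maxshape=None,
                     scaleoffset=None)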
Example #31
def generate_dcpl(shape, dtype, chunks, compression, compression_opts, shuffle,
                  fletcher32, maxshape):
    """ Generate a dataset creation property list.

    Undocumented and subject to change without warning.
    """

    # Scalar datasets don't support any fancy features
    # However, the currently defined behavior is simply to ignore these options
    if shape == ():
        #if any((chunks, compression, compression_opts, shuffle, fletcher32)):
        #    raise TypeError("Scalar datasets don't support chunk/filter options")
        if maxshape and maxshape != ():
            raise TypeError("Scalar datasets cannot be extended")
        return h5p.create(h5p.DATASET_CREATE)

    def rq_tuple(tpl, name):
        """ Check if chunks/maxshape match dataset rank """
        if tpl in (None, True):
            return
        try:
            tpl = tuple(tpl)
        except TypeError:
            raise TypeError('"%s" argument must be None or a sequence object' %
                            name)
        if len(tpl) != len(shape):
            raise ValueError('"%s" must have same rank as dataset shape' %
                             name)

    rq_tuple(chunks, 'chunks')
    rq_tuple(maxshape, 'maxshape')

    if compression is not None:

        if compression not in encode:
            raise ValueError('Compression filter "%s" is unavailable' %
                             compression)

        if compression == 'gzip':
            if compression_opts is None:
                gzip_level = DEFAULT_GZIP
            elif compression_opts in range(10):
                gzip_level = compression_opts
            else:
                raise ValueError(
                    "GZIP setting must be an integer from 0-9, not %r" %
                    compression_opts)

        elif compression == 'lzf':
            if compression_opts is not None:
                raise ValueError("LZF compression filter accepts no options")

        elif compression == 'szip':
            if compression_opts is None:
                compression_opts = DEFAULT_SZIP

            err = "SZIP options must be a 2-tuple ('ec'|'nn', even integer 0-32)"
            try:
                szmethod, szpix = compression_opts
            except TypeError:
                raise TypeError(err)
            if szmethod not in ('ec', 'nn'):
                raise ValueError(err)
            if not (0 < szpix <= 32 and szpix % 2 == 0):
                raise ValueError(err)

    elif compression_opts is not None:
        # Can't specify just compression_opts by itself.
        raise TypeError("Compression method must be specified")

    # End argument validation

    if (chunks is True) or \
    (chunks is None and any((shuffle, fletcher32, compression, maxshape))):
        chunks = guess_chunk(shape, dtype.itemsize)

    if maxshape is True:
        maxshape = (None, ) * len(shape)

    plist = h5p.create(h5p.DATASET_CREATE)
    if chunks is not None:
        plist.set_chunk(chunks)
        plist.set_fill_time(h5d.FILL_TIME_ALLOC)  # prevent resize glitch

    # MUST be first, to prevent 1.6/1.8 compatibility glitch
    if fletcher32:
        plist.set_fletcher32()

    if shuffle:
        plist.set_shuffle()

    if compression == 'gzip':
        plist.set_deflate(gzip_level)
    elif compression == 'lzf':
        plist.set_filter(h5z.FILTER_LZF, h5z.FLAG_OPTIONAL)
    elif compression == 'szip':
        opts = {'ec': h5z.SZIP_EC_OPTION_MASK, 'nn': h5z.SZIP_NN_OPTION_MASK}
        plist.set_szip(opts[szmethod], szpix)

    return plist
Example #32
    def setup_fid(self, name):
        self.fname = res.get_data_copy(name)

        plist = h5p.create(h5p.FILE_ACCESS)
        plist.set_fclose_degree(h5f.CLOSE_STRONG)
        self.fid = h5f.open(self.fname, h5f.ACC_RDWR, fapl=plist)
Example #33
def _make_new_dset(shape=None,
                   dtype=None,
                   data=None,
                   chunks=None,
                   compression=None,
                   shuffle=None,
                   fletcher32=None,
                   maxshape=None,
                   compression_opts=None,
                   fillvalue=None,
                   scaleoffset=None,
                   track_times=None,
                   external=None,
                   track_order=None,
                   dcpl=None):
    """ Return a new low-level dataset identifier """

    # Convert data to a C-contiguous ndarray
    if data is not None and not isinstance(data, Empty):
        # normalize strings -> np.dtype objects
        if dtype is not None:
            _dtype = np.dtype(dtype)
        else:
            _dtype = None

        # if we are going to a f2 datatype, pre-convert in python
        # to workaround a possible h5py bug in the conversion.
        is_small_float = (_dtype is not None and _dtype.kind == 'f'
                          and _dtype.itemsize == 2)
        data = np.asarray(
            data,
            order="C",
            dtype=(_dtype if is_small_float else guess_dtype(data)))

    # Validate shape
    if shape is None:
        if data is None:
            if dtype is None:
                raise TypeError(
                    "One of data, shape or dtype must be specified")
            data = Empty(dtype)
        shape = data.shape
    else:
        shape = (shape, ) if isinstance(shape, int) else tuple(shape)
        if data is not None and (np.prod(shape, dtype=np.ulonglong) !=
                                 np.prod(data.shape, dtype=np.ulonglong)):
            raise ValueError("Shape tuple is incompatible with data")

    if isinstance(maxshape, int):
        maxshape = (maxshape, )

    # Validate chunk shape
    if isinstance(chunks, int) and not isinstance(chunks, bool):
        chunks = (chunks, )

    # The original make_new_dset errors here if the shape is less than the
    # chunk size, but we avoid doing that as we cannot change the chunk size
    # for a dataset for any version once it is created. See #34.

    if isinstance(dtype, Datatype):
        # Named types are used as-is
        tid = dtype.id
        dtype = tid.dtype  # Following code needs this
    else:
        # Validate dtype
        if dtype is None and data is None:
            dtype = np.dtype("=f4")
        elif dtype is None and data is not None:
            dtype = data.dtype
        else:
            dtype = np.dtype(dtype)
        tid = h5t.py_create(dtype, logical=1)

    # Legacy
    if any((compression, shuffle, fletcher32, maxshape,
            scaleoffset)) and chunks is False:
        raise ValueError("Chunked format required for given storage options")

    # Legacy
    if compression is True:
        if compression_opts is None:
            compression_opts = 4
        compression = 'gzip'

    # Legacy
    if compression in _LEGACY_GZIP_COMPRESSION_VALS:
        if compression_opts is not None:
            raise TypeError("Conflict in compression options")
        compression_opts = compression
        compression = 'gzip'
    dcpl = filters.fill_dcpl(dcpl or h5p.create(h5p.DATASET_CREATE), shape,
                             dtype, chunks, compression, compression_opts,
                             shuffle, fletcher32, maxshape, scaleoffset,
                             external)

    if fillvalue is not None:
        fillvalue = np.array(fillvalue)
        dcpl.set_fill_value(fillvalue)

    if track_times in (True, False):
        dcpl.set_obj_track_times(track_times)
    elif track_times is not None:
        raise TypeError("track_times must be either True or False")
    if track_order == True:
        dcpl.set_attr_creation_order(h5p.CRT_ORDER_TRACKED
                                     | h5p.CRT_ORDER_INDEXED)
    elif track_order == False:
        dcpl.set_attr_creation_order(0)
    elif track_order is not None:
        raise TypeError("track_order must be either True or False")

    if maxshape is not None:
        maxshape = tuple(m if m is not None else h5s.UNLIMITED
                         for m in maxshape)

    if isinstance(data, Empty):
        raise NotImplementedError("Empty datasets")
    return data
Example #34
import os

from mpi4py import MPI

# Get MPI info
comm = MPI.COMM_WORLD
NPROCS_LOCAL = int(os.environ['OMPI_COMM_WORLD_LOCAL_SIZE'])
# Get number of processes
NPROCS = comm.size
# Get rank
rank = comm.rank

# h5py for storage
import h5py
from h5py import h5s, h5p, h5fd

dxpl = h5p.create(h5p.DATASET_XFER)
dxpl.set_dxpl_mpio(h5fd.MPIO_COLLECTIVE)


def task(rk, l):
    b = rk * l
    return (b, b + l)


class Bunch(object):
    def __init__(self, adict):
        self.__dict__.update(adict)


def factors(n):
    from functools import reduce  # needed on Python 3
    # assumed completion of this truncated helper: collect divisor pairs
    # up to sqrt(n)
    return sorted(set(reduce(list.__add__,
                             ([i, n // i] for i in range(1, int(n ** 0.5) + 1)))))
Example #35
def generate_dcpl(shape, dtype, chunks, compression, compression_opts,
                  shuffle, fletcher32, maxshape):
    """ Generate a dataset creation property list.

    Undocumented and subject to change without warning.
    """

    if shape == ():
        if any((chunks, compression, compression_opts, shuffle, fletcher32)):
            raise TypeError("Scalar datasets don't support chunk/filter options")
        if maxshape and maxshape != ():
            raise TypeError("Scalar datasets cannot be extended")
        return h5p.create(h5p.DATASET_CREATE)

    def rq_tuple(tpl, name):
        """ Check if chunks/maxshape match dataset rank """
        if tpl in (None, True):
            return
        try:
            tpl = tuple(tpl)
        except TypeError:
            raise TypeError('"%s" argument must be None or a sequence object' % name)
        if len(tpl) != len(shape):
            raise ValueError('"%s" must have same rank as dataset shape' % name)

    rq_tuple(chunks, 'chunks')
    rq_tuple(maxshape, 'maxshape')

    if compression is not None:

        if compression not in encode:
            raise ValueError('Compression filter "%s" is unavailable' % compression)

        if compression == 'gzip':
            if compression_opts is None:
                gzip_level = DEFAULT_GZIP
            elif compression_opts in range(10):
                gzip_level = compression_opts
            else:
                raise ValueError("GZIP setting must be an integer from 0-9, not %r" % compression_opts)

        elif compression == 'lzf':
            if compression_opts is not None:
                raise ValueError("LZF compression filter accepts no options")

        elif compression == 'szip':
            if compression_opts is None:
                compression_opts = DEFAULT_SZIP

            err = "SZIP options must be a 2-tuple ('ec'|'nn', even integer 0-32)"
            try:
                szmethod, szpix = compression_opts
            except TypeError:
                raise TypeError(err)
            if szmethod not in ('ec', 'nn'):
                raise ValueError(err)
            if not (0 < szpix <= 32 and szpix % 2 == 0):
                raise ValueError(err)

    elif compression_opts is not None:
        # Can't specify just compression_opts by itself.
        raise TypeError("Compression method must be specified")

    # End argument validation

    if (chunks is True) or \
    (chunks is None and any((shuffle, fletcher32, compression, maxshape))):
        chunks = guess_chunk(shape, maxshape, dtype.itemsize)
        
    if maxshape is True:
        maxshape = (None,)*len(shape)

    plist = h5p.create(h5p.DATASET_CREATE)
    if chunks is not None:
        plist.set_chunk(chunks)
        plist.set_fill_time(h5d.FILL_TIME_ALLOC)  # prevent resize glitch

    # MUST be first, to prevent 1.6/1.8 compatibility glitch
    if fletcher32:
        plist.set_fletcher32()

    if shuffle:
        plist.set_shuffle()

    if compression == 'gzip':
        plist.set_deflate(gzip_level)
    elif compression == 'lzf':
        plist.set_filter(h5z.FILTER_LZF, h5z.FLAG_OPTIONAL)
    elif compression == 'szip':
        opts = {'ec': h5z.SZIP_EC_OPTION_MASK, 'nn': h5z.SZIP_NN_OPTION_MASK}
        plist.set_szip(opts[szmethod], szpix)

    return plist
Example #36
    def test_mdc_config(self):
        '''test get/set mdc config '''
        falist = h5p.create(h5p.FILE_ACCESS)

        config = falist.get_mdc_config()
        falist.set_mdc_config(config)
Example #37
    def test_libver_v110(self):
        """ Test libver bounds set/get for H5F_LIBVER_V110"""
        plist = h5p.create(h5p.FILE_ACCESS)
        plist.set_libver_bounds(h5f.LIBVER_V18, h5f.LIBVER_V110)
        self.assertEqual((h5f.LIBVER_V18, h5f.LIBVER_V110),
                         plist.get_libver_bounds())
Example #38
    def test_libver(self):
        """ Test libver bounds set/get """
        plist = h5p.create(h5p.FILE_ACCESS)
        plist.set_libver_bounds(h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST)
        self.assertEqual((h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST),
                         plist.get_libver_bounds())
Example #39
def default_lcpl():
    """ Default link creation property list """
    lcpl = h5p.create(h5p.LINK_CREATE)
    lcpl.set_create_intermediate_group(True)
    return lcpl
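Sketch: passing this LCPL to a low-level group create builds the intermediate groups automatically (fid is assumed to be an open FileID; the path is illustrative):

from h5py import h5g

gid = h5g.create(fid, b"/a/b/c", lcpl=default_lcpl())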
Example #40
def generate_dcpl(shape, dtype, chunks, compression, compression_opts, shuffle,
                  fletcher32, maxshape, scaleoffset):
    """ Generate a dataset creation property list.

    Undocumented and subject to change without warning.
    """

    if shape == ():
        if any((chunks, compression, compression_opts, shuffle, fletcher32,
                scaleoffset is not None)):
            raise TypeError(
                "Scalar datasets don't support chunk/filter options")
        if maxshape and maxshape != ():
            raise TypeError("Scalar datasets cannot be extended")
        return h5p.create(h5p.DATASET_CREATE)

    def rq_tuple(tpl, name):
        """ Check if chunks/maxshape match dataset rank """
        if tpl in (None, True):
            return
        try:
            tpl = tuple(tpl)
        except TypeError:
            raise TypeError('"%s" argument must be None or a sequence object' %
                            name)
        if len(tpl) != len(shape):
            raise ValueError('"%s" must have same rank as dataset shape' %
                             name)

    rq_tuple(chunks, 'chunks')
    rq_tuple(maxshape, 'maxshape')

    if compression is not None:

        if compression not in encode:
            raise ValueError('Compression filter "%s" is unavailable' %
                             compression)

        if compression == 'gzip':
            if compression_opts is None:
                gzip_level = DEFAULT_GZIP
            elif compression_opts in range(10):
                gzip_level = compression_opts
            else:
                raise ValueError(
                    "GZIP setting must be an integer from 0-9, not %r" %
                    compression_opts)

        elif compression == 'lzf':
            if compression_opts is not None:
                raise ValueError("LZF compression filter accepts no options")

        elif compression == 'szip':
            if compression_opts is None:
                compression_opts = DEFAULT_SZIP

            err = "SZIP options must be a 2-tuple ('ec'|'nn', even integer 0-32)"
            try:
                szmethod, szpix = compression_opts
            except TypeError:
                raise TypeError(err)
            if szmethod not in ('ec', 'nn'):
                raise ValueError(err)
            if not (0 < szpix <= 32 and szpix % 2 == 0):
                raise ValueError(err)

    elif compression_opts is not None:
        # Can't specify just compression_opts by itself.
        raise TypeError("Compression method must be specified")

    if scaleoffset is not None:
        # scaleoffset must be an integer when it is not None or False,
        # except for integral data, for which scaleoffset == True is
        # permissible (will use SO_INT_MINBITS_DEFAULT)

        if scaleoffset < 0:
            raise ValueError('scale factor must be >= 0')

        if dtype.kind == 'f':
            if scaleoffset is True:
                raise ValueError('integer scaleoffset must be provided for '
                                 'floating point types')
        elif dtype.kind in ('u', 'i'):
            if scaleoffset is True:
                scaleoffset = h5z.SO_INT_MINBITS_DEFAULT
        else:
            raise TypeError('scale/offset filter only supported for integer '
                            'and floating-point types')

        # Scale/offset following fletcher32 in the filter chain will (almost?)
        # always trigger a read error, as most scale/offset settings are
        # lossy. Since fletcher32 must come first (see comment below) we
        # simply prohibit the combination of fletcher32 and scale/offset.
        if fletcher32:
            raise ValueError('fletcher32 cannot be used with potentially lossy'
                             ' scale/offset filter')
    # End argument validation

    if (chunks is True) or \
    (chunks is None and any((shuffle, fletcher32, compression, maxshape,
                             scaleoffset is not None))):
        chunks = guess_chunk(shape, maxshape, dtype.itemsize)

    if maxshape is True:
        maxshape = (None, ) * len(shape)

    plist = h5p.create(h5p.DATASET_CREATE)
    if chunks is not None:
        plist.set_chunk(chunks)
        plist.set_fill_time(h5d.FILL_TIME_ALLOC)  # prevent resize glitch

    # MUST be first, to prevent 1.6/1.8 compatibility glitch
    if fletcher32:
        plist.set_fletcher32()

    # scale-offset must come before shuffle and compression
    if scaleoffset is not None:
        if dtype.kind in ('u', 'i'):
            plist.set_scaleoffset(h5z.SO_INT, scaleoffset)
        else:  # dtype.kind == 'f'
            plist.set_scaleoffset(h5z.SO_FLOAT_DSCALE, scaleoffset)

    if shuffle:
        plist.set_shuffle()

    if compression == 'gzip':
        plist.set_deflate(gzip_level)
    elif compression == 'lzf':
        plist.set_filter(h5z.FILTER_LZF, h5z.FLAG_OPTIONAL)
    elif compression == 'szip':
        opts = {'ec': h5z.SZIP_EC_OPTION_MASK, 'nn': h5z.SZIP_NN_OPTION_MASK}
        plist.set_szip(opts[szmethod], szpix)

    return plist