Code example #1
    def test__get_or_create_node(self):
        file = Mock()
        src_node = Mock()
        file.get_node.return_value = sentinel.node

        node = publicdb._get_or_create_node(file, sentinel.group, src_node)
        file.get_node.assert_called_once_with(sentinel.group, src_node.name)
        self.assertEqual(node, sentinel.node)

        file.get_node.side_effect = tables.NoSuchNodeError('no such node!')
        # Raises an exception because the plain Mock src_node is neither a Table nor a VLArray
        self.assertRaises(Exception, publicdb._get_or_create_node, file,
                          sentinel.group, src_node)

        src_node = Mock(spec=tables.Table)
        src_node.description = sentinel.description
        node = publicdb._get_or_create_node(file, sentinel.group, src_node)
        file.create_table.assert_called_once_with(sentinel.group,
                                                  src_node.name,
                                                  src_node.description,
                                                  src_node.title)

        src_node = Mock(spec=tables.VLArray)
        src_node.atom = sentinel.atom
        node = publicdb._get_or_create_node(file, sentinel.group, src_node)
        file.create_vlarray.assert_called_once_with(sentinel.group,
                                                    src_node.name,
                                                    src_node.atom,
                                                    src_node.title)
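
The assertions above pin down the expected behaviour: look the node up first, and only create it when `get_node` raises `tables.NoSuchNodeError`, dispatching on the source node's type. A minimal sketch that would satisfy this test (the actual publicdb helper may differ) could look like this:

import tables

def _get_or_create_node(file, group, src_node):
    """Return the node named like src_node in group, creating it if missing."""
    try:
        node = file.get_node(group, src_node.name)
    except tables.NoSuchNodeError:
        if isinstance(src_node, tables.Table):
            node = file.create_table(group, src_node.name,
                                     src_node.description, src_node.title)
        elif isinstance(src_node, tables.VLArray):
            node = file.create_vlarray(group, src_node.name,
                                       src_node.atom, src_node.title)
        else:
            # Neither a Table nor a VLArray: the test expects an exception here.
            raise Exception('Unsupported node type: %r' % type(src_node))
    return node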
Code example #2
File: io.py  Project: Python3pkg/Magni
def read_chases(h5file):
    """
    Read the chases from an HDF5 database.

    Parameters
    ----------
    h5file : tables.file.File
        The handle to the HDF5 database from which the chases are read.

    Returns
    -------
    chases : dict
        The chases read from the HDF5 database.

    Raises
    ------
    ValueError
        If the chases in the HDF5 database do not conform to the Magni chases
        standard.

    Notes
    -----
    The returned dict holds a key for each chase in the database. The value
    corresponding to a given key is a string. See
    `magni.reproducibility.chase_database` for examples of such chases.

    Examples
    --------
    Read chases from the database named 'db.hdf5':

    >>> import magni
    >>> from magni.reproducibility.io import read_chases
    >>> with magni.utils.multiprocessing.File('db.hdf5', mode='r') as h5file:
    ...    chases = read_chases(h5file)

    """
    @_decorate_validation
    def validate_input():
        _generic('h5file', tables.file.File)

    validate_input()

    try:
        h5_chases = h5file.get_node('/', name='chases')
    except tables.NoSuchNodeError:
        raise tables.NoSuchNodeError('The database has not been chased.')

    h5_chase_dict = h5_chases._v_leaves
    chases = dict()
    try:
        for chase in h5_chase_dict:
            chases[chase] = json.loads(h5_chase_dict[chase].read().decode())
    except ValueError as e:
        raise ValueError('Unable to read the {!r} chase. '.format(chase) +
                         'It seems that the chase does not conform to the ' +
                         'Magni chase standard ({!r}).'.format(e.args[0]))

    return chases
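
A hedged usage sketch built on the docstring example above; the file name 'db.hdf5' mirrors that example, and falling back to an empty dict when the '/chases' node is missing is an assumption by the caller, not part of Magni itself:

import tables
import magni
from magni.reproducibility.io import read_chases

with magni.utils.multiprocessing.File('db.hdf5', mode='r') as h5file:
    try:
        chases = read_chases(h5file)
    except tables.NoSuchNodeError:
        # The database has no '/chases' node yet; treat it as empty.
        chases = {}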
Code example #3
from typing import Tuple

import numpy as np
import tables as tb

# Assumes get_sensor_binning is available in the surrounding module.


def pmt_and_sipm_bin_width(file_name: str) -> Tuple[float, float]:
    """
    Returns the PMT and SiPM bin widths as set in nexus.
    Assumes Pmt + SiPM as in NEW, NEXT100 & DEMO.
    """
    sns_bins = get_sensor_binning(file_name)
    if sns_bins.empty or np.any(sns_bins.bin_width <= 0):
        raise tb.NoSuchNodeError('No useful binning info found')
    pmt_wid = sns_bins.bin_width[sns_bins.index.str.contains('Pmt')].iloc[0]
    sipm_wid = sns_bins.bin_width[sns_bins.index.str.contains('SiPM')].iloc[0]
    return pmt_wid, sipm_wid
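
A usage sketch of the error path above; the file name is hypothetical, and logging before re-raising is just one way a caller might react when no usable binning is stored:

import tables as tb

try:
    pmt_wid, sipm_wid = pmt_and_sipm_bin_width('nexus_output.h5')
except tb.NoSuchNodeError as exc:
    # No (or unusable) sensor binning in the file; surface the problem.
    print('Sensor binning unavailable:', exc)
    raise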
Code example #4
File: io.py  Project: Python3pkg/Magni
def read_annotations(h5file):
    """
    Read the annotations from an HDF5 database.

    Parameters
    ----------
    h5file : tables.file.File
        The handle to the HDF5 database from which the annotations are read.

    Returns
    -------
    annotations : dict
        The annotations read from the HDF5 database.

    Raises
    ------
    ValueError
        If the annotations in the HDF5 database do not conform to the Magni
        annotation standard.

    Notes
    -----
    The returned dict holds a key for each annotation in the database. The
    value corresponding to a given key is in itself a dict. See
    `magni.reproducibility.annotate_database` for examples of such annotations.

    Examples
    --------
    Read annotations from the database named 'db.hdf5':

    >>> import magni
    >>> from magni.reproducibility.io import read_annotations
    >>> with magni.utils.multiprocessing.File('db.hdf5', mode='r') as h5file:
    ...    annotations = read_annotations(h5file)

    """
    @_decorate_validation
    def validate_input():
        _generic('h5file', tables.file.File)

    validate_input()

    try:
        h5_annotations = h5file.get_node('/', name='annotations')
    except tables.NoSuchNodeError:
        raise tables.NoSuchNodeError('The database has not been annotated.')

    annotations = dict()
    _recursive_annotation_read(h5_annotations, annotations)

    return annotations
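
read_annotations delegates the tree walk to _recursive_annotation_read, which is not shown in this excerpt. The following is only an illustrative sketch, not the Magni implementation, assuming annotations are stored as JSON strings in leaves and that nested dicts mirror HDF5 subgroups:

import json

def _recursive_annotation_read(h5_group, annotations):
    # Decode every leaf in this group as a JSON document ...
    for name, leaf in h5_group._v_leaves.items():
        annotations[name] = json.loads(leaf.read().decode())
    # ... and recurse into subgroups, mirroring the HDF5 layout as nested dicts.
    for name, subgroup in h5_group._v_groups.items():
        annotations[name] = {}
        _recursive_annotation_read(subgroup, annotations[name])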
Code example #5
    def test__get_or_create_group(self):
        file = Mock()
        file.get_node.return_value = sentinel.file_group
        group = publicdb._get_or_create_group(file, sentinel.group)
        self.assertEqual(group, sentinel.file_group)

        file = Mock()
        file.get_node.side_effect = tables.NoSuchNodeError('no such node!')
        in_group = '/hisparc/station_501'
        out_group = publicdb._get_or_create_group(file, in_group)
        file.create_group.assert_called_once_with('/hisparc',
                                                  'station_501',
                                                  'Data group',
                                                  createparents=True)
        self.assertEqual(file.create_group.return_value, out_group)
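
As with example #1, the assertions outline the implementation: return the existing group, and on NoSuchNodeError split the path and create the group with createparents=True. A minimal sketch consistent with this test (the actual publicdb code may differ):

import os.path
import tables

def _get_or_create_group(file, group):
    """Return the group, creating it (with its parents) when it does not exist."""
    try:
        group = file.get_node(group)
    except tables.NoSuchNodeError:
        parent, newgroup = os.path.split(group)
        group = file.create_group(parent, newgroup, 'Data group',
                                  createparents=True)
    return group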
Code example #6
File: filenode.py  Project: esi-neuroscience/PyTables
def read_from_filenode(h5file, filename, where, name=None, overwrite=False,
                       create_target=False):
    r"""Read a filenode from a PyTables file and write its contents to a file.

    .. versionadded:: 3.2

    Parameters
    ----------
    h5file
      The PyTables file to be read from; can be either a string
      giving the file's location or a :class:`File` object.

    filename
      Path of the file where the contents of the filenode shall be
      written to.  If *filename* points to a directory or ends with
      ``/`` (``\`` on Windows), the filename will be set to the
      *_filename* (if present; otherwise the *name*) attribute of the
      read filenode.

    where, name
      Location of the filenode where the data shall be read from.  If
      no node *name* can be found at *where*, the first node at
      *where* whose *_filename* attribute matches *name* will be read.

    overwrite
      Whether or not a possibly existing file of the specified
      *filename* shall be overwritten.

    create_target
      Whether or not the folder hierarchy needed to accommodate the
      given target ``filename`` will be created.

    """
    new_h5file = not isinstance(h5file, tables.file.File)
    f = tables.File(h5file, "r") if new_h5file else h5file
    try:
        fnode = open_node(f.get_node(where=where, name=name))
    except tables.NoSuchNodeError:
        fnode = None
        for n in f.walk_nodes(where=where, classname="EArray"):
            if n.attrs._filename == name:
                fnode = open_node(n)
                break
        if fnode is None:
            f.close()
            raise tables.NoSuchNodeError("A filenode '%s' cannot be found at "
                                         "'%s'" % (name, where))

    # guess output filename if necessary
    if os.path.isdir(filename) or filename.endswith(os.path.sep):
        try:
            filename = os.path.join(filename, fnode.node.attrs._filename)
        except Exception:
            filename = os.path.join(filename, fnode.node.name)

    if os.access(filename, os.R_OK) and not overwrite:
        if new_h5file:
            f.close()
        raise IOError("The file '%s' already exists" % filename)

    # create folder hierarchy if necessary
    if create_target and not os.path.isdir(os.path.split(filename)[0]):
        os.makedirs(os.path.split(filename)[0])

    if not os.access(os.path.split(filename)[0], os.W_OK):
        if new_h5file:
            f.close()
        raise IOError("The file '%s' cannot be written to" % filename)

    # read data from filenode
    data = fnode.read()
    fnode.close()

    # store data to file
    with open(filename, "wb") as fd:
        fd.write(data)

    # cleanup
    del data
    if new_h5file:
        f.close()
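
For completeness, a hedged usage sketch of the function above; the database name, target directory and node location are assumptions. Passing a target that ends with a separator lets the function recover the output name from the node's _filename attribute, and create_target=True builds the directory tree first:

import tables
from tables.nodes import filenode

try:
    filenode.read_from_filenode('data.h5', '/tmp/extracted/', '/files',
                                name='config.txt', create_target=True)
except tables.NoSuchNodeError:
    print("No filenode named 'config.txt' under '/files'.")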