Example #1
    def test_not_h5_dataset(self):

        for arg in [
                np.arange(3),
                da.from_array(np.arange(3)),
                self.h5_simple_file['MyGroup'],
                self.h5_simple_file,
        ]:
            self.assertFalse(check_if_main(arg))
Example #2
    def test_invalid_types_for_str_attrs(self):
        for key in [
                'quantity', 'units', 'main_data_name', 'data_type', 'modality',
                'source'
        ]:
            attribute = self.h5_nsid_simple['MyGroup']['data'].attrs[key]
            self.h5_nsid_simple['MyGroup']['data'].attrs[key] = 1
            self.assertFalse(
                check_if_main(self.h5_nsid_simple['MyGroup']['data']))
            self.h5_nsid_simple['MyGroup']['data'].attrs[key] = attribute
Example #3
    def test_mandatory_attrs_not_present(self):
        for key in [
                'quantity', 'units', 'main_data_name', 'data_type', 'modality',
                'source'
        ]:
            attribute = self.h5_nsid_simple['MyGroup']['data'].attrs[key]

            del self.h5_nsid_simple['MyGroup']['data'].attrs[key]
            self.assertFalse(
                check_if_main(self.h5_nsid_simple['MyGroup']['data']))
            self.h5_nsid_simple['MyGroup']['data'].attrs[key] = attribute
Example #4
    def read_all(self, recursive=True, parent=None):
        """
        Reads all HDF5 datasets formatted according to NSID specifications.

        Parameters
        ----------
        recursive : bool, default = True
            If True, reads every NSID main dataset found anywhere in the file.
            If False, reads only the main datasets directly within the
            provided parent group.
        parent : h5py.Group, Default = None
            HDF5 group under which to read all available datasets.
            By default, all datasets within the HDF5 file are read.

        Returns
        -------
        list of sidpy.Dataset objects
            Main datasets read from the provided group or the entire file
        """

        if parent is None:
            h5_group = self._h5_file
        else:
            if not isinstance(parent, h5py.Group):
                raise TypeError('parent should be a h5py.Group object')
            self.__validate_obj_in_same_file(parent)
            h5_group = parent

        if recursive:
            list_of_main = self._main_dsets
        else:
            list_of_main = []
            for key in h5_group:
                if isinstance(h5_group[key], h5py.Dataset):
                    if check_if_main(h5_group[key]):
                        list_of_main.append(h5_group[key])

        # Go through each of the identified main datasets and read it in
        list_of_datasets = []
        for dset in list_of_main:
            list_of_datasets.append(read_h5py_dataset(dset))
        return list_of_datasets
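For context, here is a rough usage sketch of the reader above. The class name NSIDReader, the import path pyNSID.io.nsi_reader, and the idea of passing an already-open h5py.File to the constructor are assumptions based on pyNSID's usual layout and may differ in your version.

import h5py
from pyNSID.io.nsi_reader import NSIDReader  # assumed import path

h5_file = h5py.File('measurement.h5', mode='r')  # hypothetical file name
reader = NSIDReader(h5_file)

# Read every NSID main dataset in the file
all_datasets = reader.read_all()

# Read only the main datasets directly inside one group
group_datasets = reader.read_all(recursive=False, parent=h5_file['MyGroup'])

for dset in all_datasets:
    print(dset)

h5_file.close()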
Example #5
    def test_dset_is_main(self):
        self.assertTrue(check_if_main(self.h5_nsid_simple['MyGroup']['data']))
Example #6
    def test_dim_sizes_not_matching_main(self):
        h5_nsid_wrong_dim_length = make_nsid_length_dim_wrong()
        self.assertFalse(
            check_if_main(h5_nsid_wrong_dim_length['MyGroup']['data']))
Example #7
    def test_dim_exist_but_scales_not_attached_to_main(self):
        h5_nsid_no_dim_attached = make_nsid_dataset_no_dim_attached()
        self.assertFalse(
            check_if_main(h5_nsid_no_dim_attached['MyGroup']['data']))
Example #8
    def test_dims_missing(self):
        self.assertFalse(check_if_main(self.h5_simple_file['MyGroup']['data']))
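Taken together, the tests above spell out what check_if_main() expects of an HDF5 dataset: it must be an h5py.Dataset, carry the six string attributes ('quantity', 'units', 'main_data_name', 'data_type', 'modality', 'source'), and have one dimension scale attached per axis whose length matches that axis. Below is an illustrative sketch of writing such a dataset by hand; the import path for check_if_main is an assumption, and your pyNSID version may enforce additional checks (for example on the dimension-scale datasets themselves), so treat it as a starting point rather than a guaranteed recipe.

import numpy as np
import h5py
from pyNSID.io.hdf_utils import check_if_main  # assumed import path

with h5py.File('sketch.h5', mode='w') as h5_file:  # hypothetical file name
    grp = h5_file.create_group('MyGroup')
    data = grp.create_dataset('data', data=np.random.rand(4, 5))

    # Mandatory string attributes exercised in Examples #2 and #3
    for key in ['quantity', 'units', 'main_data_name', 'data_type',
                'modality', 'source']:
        data.attrs[key] = 'generic'

    # One dimension scale per axis, with lengths matching the data shape
    # (Examples #6-#8 show the failures when this is missing or mismatched)
    for axis, name in enumerate(['y', 'x']):
        dim = grp.create_dataset(name, data=np.arange(data.shape[axis]))
        dim.make_scale(name)
        data.dims[axis].attach_scale(dim)

    print(check_if_main(data))  # True only if these checks are the full set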