def test_plugin_marshaller_SubList():
    mc = hdf5storage.MarshallerCollection(load_plugins=True,
                                          lazy_loading=True)
    options = hdf5storage.Options(store_python_metadata=True,
                                  matlab_compatible=False,
                                  marshaller_collection=mc)
    ell = [1, 2, 'b1', b'3991', True, None]
    data = example_hdf5storage_marshaller_plugin.SubList(ell)
    f = None
    name = '/a'
    try:
        f = tempfile.mkstemp()
        os.close(f[0])
        filename = f[1]
        hdf5storage.write(data, path=name, filename=filename,
                          options=options)
        out = hdf5storage.read(path=name, filename=filename,
                               options=options)
    except:
        raise
    finally:
        if f is not None:
            os.remove(f[1])
    assert_equal_nose(ell, list(out))
    assert_equal_nose(type(out),
                      example_hdf5storage_marshaller_plugin.SubList)
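For contrast with the plugin round-trip above, a minimal sketch of the same write/read cycle with stock options (no plugin marshallers; the path '/a' and filename 'data.h5' are arbitrary):

import hdf5storage

# Without the SubList marshaller, a plain list round-trips as a
# standard Python/NumPy container rather than a SubList.
hdf5storage.write([1, 2, 'b1'], path='/a', filename='data.h5')
out = hdf5storage.read(path='/a', filename='data.h5')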
Example #2
def check_all_valid_str_keys(tp, option_keywords):
    options = hdf5storage.Options(**option_keywords)
    key_value_names = (options.dict_like_keys_name,
                       options.dict_like_values_name)

    data = random_dict(tp)
    for k in key_value_names:
        if k in data:
            del data[k]

    # Make a random name.
    name = random_name()

    # Write the data to the proper file with the given name using the
    # provided options. The file needs to be deleted afterwards to keep
    # junk from building up (this mkstemp/close/remove pattern recurs
    # throughout these examples; a reusable helper is sketched after
    # this function).
    fld = None
    try:
        fld = tempfile.mkstemp()
        os.close(fld[0])
        filename = fld[1]
        hdf5storage.write(data, path=name, filename=filename, options=options)

        with h5py.File(filename, mode='r') as f:
            for k in key_value_names:
                assert escape_path(k) not in f[name]
            for k in data:
                assert escape_path(k) in f[name]
    except:
        raise
    finally:
        if fld is not None:
            os.remove(fld[1])
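The mkstemp/close/remove pattern above repeats in nearly every example on this page; it can be factored into a context manager. A sketch, assuming nothing beyond the standard library (the helper name temp_hdf5_filename is made up for illustration):

import contextlib
import os
import tempfile

@contextlib.contextmanager
def temp_hdf5_filename():
    # mkstemp returns an open OS-level descriptor and a path; close the
    # descriptor right away, hand the path to the caller, and delete
    # the file afterwards so junk does not build up.
    fd, filename = tempfile.mkstemp()
    os.close(fd)
    try:
        yield filename
    finally:
        os.remove(filename)

With it, each try/finally body above collapses to "with temp_hdf5_filename() as filename: ...".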
Example #3
def write_mat(mat_fn, vertices, faces, save_areas=True):
  mat_data = {}
  mat_data[u'vertices'] = vertices
  mat_data[u'faces'] = faces
  if save_areas:
    mat_data[u'areas'] = cal_areas(vertices, faces)
  hs.writes(mat_data, mat_fn, options=hs.Options(matlab_compatible=True))
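A hypothetical invocation with a one-triangle mesh (numpy imported as np; save_areas=False sidesteps the cal_areas dependency):

vertices = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])
faces = np.array([[0, 1, 2]])
write_mat('mesh.mat', vertices, faces, save_areas=False)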
Example #4
def test_marshaller_read_approximate_missing_import():
    m = Tmarshaller()
    m.required_parent_modules = ['json']
    m.required_modules = ['aiveneiavie']
    m.python_type_strings = ['ellipsis']
    m.types = ['builtins.ellipsis']
    m.update_type_lookups()
    mc = hdf5storage.MarshallerCollection(lazy_loading=True, marshallers=[m])
    options = hdf5storage.Options(marshaller_collection=mc)

    fld = None
    name = '/the'
    try:
        fld = tempfile.mkstemp()
        os.close(fld[0])
        filename = fld[1]
        with h5py.File(filename, mode='w') as f:
            f.create_dataset(name, data=np.int64([1]))
            f[name].attrs.create('Python.Type', b'ellipsis')
            out = hdf5storage.utilities.read_data(f, f, name, options)
    except:
        raise
    finally:
        if fld is not None:
            os.remove(fld[1])

    assert_equal(out, 'read_approximate')
Example #5
    def __init__(self):
        # The parent does most of the setup. All that has to be changed
        # is turning off the storage of Python metadata and turning on
        # MATLAB compatibility.
        TestPythonMatlabFormat.__init__(self)
        self.options = hdf5storage.Options(store_python_metadata=False,
                                           matlab_compatible=True)
Example #6
    def __getitem__(self, idx):
        image = np.zeros((cfg.C, cfg.H, cfg.W))
        mask = np.zeros((cfg.O, cfg.H, cfg.W))
        img_path = self.img_files[idx % len(self.img_files)].rstrip()

        MatData = hdf5storage.loadmat(
            img_path, options=hdf5storage.Options(matlab_compatible=True))

        # enlarge the matrix to specific channels
        if MatData['I'].ndim == 2:
            for i in range(cfg.C):
                image[i, ...] = MatData['I']
        else:
            raise NotImplementedError

        # generate output channels
        for i in range(cfg.O):
            mask[i, ...] = MatData[cfg.OUTPUT_CHANNELS[i]]

        return [image, mask]


# if __name__ == "__main__":
#     path = "C:/Research/LumbarSpine/Github/unet-segmentation-lumbar/dataset/valid.txt"
#     with open(path, 'r') as file:
#         imgs = file.readlines()
#     print(len(imgs))
#     for i in range(3):
#         img_path = imgs[i % len(imgs)].rstrip()
#         MatData = hdf5storage.loadmat(img_path, options=hdf5storage.Options(matlab_compatible=True))
#         print(MatData['I'].shape)
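One design note on the __getitem__ above: it constructs a fresh Options (and with it a fresh marshaller collection) on every item. A sketch of hoisting that out, assuming the options are never mutated (load_sample is a hypothetical helper name):

_MAT_OPTIONS = hdf5storage.Options(matlab_compatible=True)

def load_sample(img_path):
    # Reuse one Options instance across calls instead of rebuilding
    # it for every dataset item.
    return hdf5storage.loadmat(img_path, options=_MAT_OPTIONS)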
Example #7
    def __init__(self):
        self.filename = 'data.mat'
        self.options = hdf5storage.Options()

        # Need a list of the supported numeric dtypes to test, excluding
        # those not supported by MATLAB. 'S' and 'U' dtype chars have to
        # be used for the bare byte and unicode string dtypes since the
        # dtype strings (but not chars) are not the same in Python 2 and
        # 3.
        self.dtypes = [
            'bool', 'uint8', 'uint16', 'uint32', 'uint64', 'int8', 'int16',
            'int32', 'int64', 'float32', 'float64', 'complex64', 'complex128',
            'S', 'U'
        ]

        # Define the sizes of random datasets to use.
        self.max_string_length = 10
        self.max_array_axis_length = 8
        self.max_list_length = 6
        self.max_posix_path_depth = 5
        self.max_posix_path_lengths = 17
        self.object_subarray_dimensions = 2
        self.max_object_subarray_axis_length = 5
        self.min_dict_keys = 4
        self.max_dict_keys = 12
        self.max_dict_key_length = 10
        self.dict_value_subarray_dimensions = 2
        self.max_dict_value_subarray_axis_length = 5
        self.min_structured_ndarray_fields = 2
        self.max_structured_ndarray_fields = 5
        self.max_structured_ndarray_field_lengths = 10
        self.max_structured_ndarray_axis_length = 2
        self.structured_ndarray_subarray_dimensions = 2
        self.max_structured_ndarray_subarray_axis_length = 4
Example #8
    def __init__(self):
        # The parent does most of the setup. All that has to be changed
        # is turning MATLAB compatibility off and changing the file
        # name.
        TestPythonMatlabFormat.__init__(self)
        self.options = hdf5storage.Options(matlab_compatible=False)
        self.filename = 'data.h5'
Example #9
def check_int_key(tp, option_keywords):
    options = hdf5storage.Options(**option_keywords)
    key_value_names = (options.dict_like_keys_name,
                       options.dict_like_values_name)

    data = random_dict(tp)
    for k in key_value_names:
        if k in data:
            del data[k]

    key = random_int()
    data[key] = random_int()

    # Make a random name.
    name = random_name()

    # Write the data to the proper file with the given name using the
    # provided options. The file needs to be deleted afterwards to keep
    # junk from building up.
    fld = None
    try:
        fld = tempfile.mkstemp()
        os.close(fld[0])
        filename = fld[1]
        hdf5storage.write(data, path=name, filename=filename, options=options)

        with h5py.File(filename, mode='r') as f:
            assert_equal_nose(set(key_value_names), set(f[name].keys()))
    except:
        raise
    finally:
        if fld is not None:
            os.remove(fld[1])
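A hypothetical invocation; the assertion confirms that a non-string key forces the whole dict to be stored as separate key and value arrays under dict_like_keys_name and dict_like_values_name:

check_int_key(dict, {'store_python_metadata': True,
                     'matlab_compatible': False})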
Example #10
    def __init__(self):
        # The parent does most of the setup. All that has to be changed
        # is turning off the storage of type information as well as
        # MATLAB compatibility.
        TestPythonMatlabFormat.__init__(self)
        self.options = hdf5storage.Options(store_python_metadata=False,
                                           matlab_compatible=False)

        # Add in float16 to the set of types tested.
        self.dtypes.append('float16')
Example #11
def write_pdir(pdir_fn, vertices, faces, save_normal_dir=True, mat_fn=''):
  vert_nei_vert, vert_nei_face = construct_vert_nei(vertices, faces)
  H, K, k1, k2, shape_index, curvedness_index, pdir1, pdir2 = get_curvature(vertices, vert_nei_vert, nei_k=5)
  with open(pdir_fn, 'w') as f:
    for i in range(pdir1.shape[0]):
      f.write('%f %f %f\n' % (pdir1[i][0], pdir1[i][1], pdir1[i][2]))
      # f.write('%f %f %f\n' % (pdir2[i][0], pdir2[i][1], pdir2[i][2]))
      # if save_normal_dir:
      #   normal_dir = np.cross(pdir1[i], pdir2[i])
      #   f.write('%f %f %f\n' % (normal_dir[0], normal_dir[1], normal_dir[2]))
  mat_data = {}
  mat_data[u'shape_index'] = shape_index
  hs.writes(mat_data, mat_fn, options=hs.Options(matlab_compatible=True))
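A hypothetical invocation, mirroring the write_mat usage earlier (construct_vert_nei and get_curvature come from the same module as write_pdir):

write_pdir('pdir1.txt', vertices, faces, mat_fn='shape_index.mat')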
Example #12
    def __init__(self):
        self.options = hdf5storage.Options()

        # Need a list of the supported numeric dtypes to test, excluding
        # those not supported by MATLAB. 'S' and 'U' dtype chars have to
        # be used for the bare byte and unicode string dtypes since the
        # dtype strings (but not chars) are not the same in Python 2 and
        # 3.
        self.dtypes = [
            'bool', 'uint8', 'uint16', 'uint32', 'uint64', 'int8', 'int16',
            'int32', 'int64', 'float32', 'float64', 'complex64', 'complex128',
            'S', 'U'
        ]

        # Need a list of dict-like types, which will depend on Python
        # version.
        self.dict_like = ['dict', 'OrderedDict']
Example #13
def check_string_type_non_str_key(tp, other_tp, option_keywords):
    options = hdf5storage.Options(**option_keywords)
    key_value_names = (options.dict_like_keys_name,
                       options.dict_like_values_name)

    data = random_dict(tp)
    for k in key_value_names:
        if k in data:
            del data[k]
    keys = list(data.keys())

    key_gen = random_str_some_unicode(max_dict_key_length)
    if other_tp == 'numpy.bytes_':
        key = np.bytes_(key_gen.encode('UTF-8'))
    elif other_tp == 'numpy.unicode_':
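        # np.unicode_ is an alias of np.str_ and was removed in NumPy 2.0.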
        key = np.unicode_(key_gen)
    elif other_tp == 'bytes':
        key = key_gen.encode('UTF-8')
    data[key] = random_int()
    keys.append(key_gen)

    # Make a random name.
    name = random_name()

    # Write the data to the proper file with the given name using the
    # provided options. The file needs to be deleted afterwards to keep
    # junk from building up.
    fld = None
    try:
        fld = tempfile.mkstemp()
        os.close(fld[0])
        filename = fld[1]
        hdf5storage.write(data, path=name, filename=filename, options=options)

        with h5py.File(filename, mode='r') as f:
            assert_equal_nose(set(keys), set(f[name].keys()))

    except:
        raise
    finally:
        if fld is not None:
            os.remove(fld[1])
Example #14
def write_normal(norm_fn, vertices, faces):
  _, vert_nei_face = construct_vert_nei(vertices, faces)
  v0 = vertices[faces[:,0]]
  v1 = vertices[faces[:,1]]
  v2 = vertices[faces[:,2]]
  e1 = v1 - v0
  e2 = v2 - v0
  face_normal_vec = np.cross(e1, e2)
  face_normal_vec /= np.linalg.norm(face_normal_vec, axis=1, keepdims=True)
  vert_normal_vec = []
  for i in range(len(vert_nei_face)):
    nv = np.zeros(3)
    for vnf in vert_nei_face[i]:
      nv += face_normal_vec[vnf, :]
    nv /= len(vert_nei_face[i])
    vert_normal_vec.append(nv)
  vert_normal_vec = np.array(vert_normal_vec)
  mat_data = {}
  mat_data[u'normals'] = vert_normal_vec
  hs.writes(mat_data, norm_fn, options=hs.Options(matlab_compatible=True))
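The per-vertex averaging above loops in Python over every vertex and its incident faces; a vectorized sketch of the same computation with np.add.at, assuming faces is an (F, 3) integer array as above:

def vertex_normals_vectorized(vertices, faces, face_normal_vec):
    # Scatter-add each face's unit normal onto its three vertices, then
    # divide by the number of incident faces, matching the loop above.
    acc = np.zeros_like(vertices, dtype=np.float64)
    counts = np.zeros(len(vertices))
    for c in range(3):
        np.add.at(acc, faces[:, c], face_normal_vec)
        np.add.at(counts, faces[:, c], 1)
    return acc / counts[:, None]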
Example #15
def check_str_key_previously_invalid_char(tp, ch, option_keywords):
    options = hdf5storage.Options(**option_keywords)
    key_value_names = (options.dict_like_keys_name,
                       options.dict_like_values_name)

    data = random_dict(tp)
    for k in key_value_names:
        if k in data:
            del data[k]

    # Add a random invalid str key using the provided character
    key = key_value_names[0]
    while key in key_value_names:
        key = ch.join(
            [random_str_ascii(max_dict_key_length) for i in range(2)])
    data[key] = random_int()

    # Make a random name.
    name = random_name()

    # Write the data to the proper file with the given name using the
    # provided options. The file needs to be deleted afterwards to keep
    # junk from building up.
    fld = None
    try:
        fld = tempfile.mkstemp()
        os.close(fld[0])
        filename = fld[1]
        hdf5storage.write(data, path=name, filename=filename, options=options)

        with h5py.File(filename, mode='r') as f:
            for k in key_value_names:
                assert escape_path(k) not in f[name]
            for k in data:
                assert escape_path(k) in f[name]
    except:
        raise
    finally:
        if fld is not None:
            os.remove(fld[1])
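For reference, escape_path (the helper these tests import) maps characters that are not allowed in HDF5 names, such as '/', to escaped forms, which is why the membership checks above compare against escaped keys:

# Quick illustration; the exact escaping scheme is hdf5storage's.
print(escape_path('a/b'))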
Example #16
import numpy as np
import pandas as pd

import sys
import pickle
import hdf5storage
from sklearn import linear_model

sys.path.append('../../analysis/bin/python_nmf_micro/')
import utils as ox

# Read in CSV with subject demographics
df_sorted = pd.read_csv(
    '../../raw_data/sheets/07-04-20-McGillData_WH_Exprodo-Report_IncExc_CR_CRmed_cham_CRtopfdemeduc_civetpass_slopes_sorted.csv'
)
options = hdf5storage.Options(oned_as='column',
                              matlab_compatible=True,
                              action_for_matlab_incompatible='error')

# LOAD CIVET MASK TO IDENTIFY MIDLINE / CORPUS CALLOSUM REGION
# IDENTIFY 'VALID VERTICES', I.E. VERTICES NOT IN THIS REGION
left_mask = np.loadtxt('../surfsamp/mask_files/CIVET_2.0_mask_left_short.txt')
left_valid = np.where(
    left_mask == 1)  # list of valid indices in civet .txt file
left_invalid = np.where(
    left_mask == 0)  # list of invalid indices in civet .txt file

right_mask = np.loadtxt(
    '../surfsamp/mask_files/CIVET_2.0_mask_right_short.txt')
right_valid = np.where(right_mask == 1)
right_invalid = np.where(right_mask == 0)
# 38561 valid vertices