Example #1
0
    def test_load_file_with_wrong_information(self):
        trk_file = open(DATA['simple_trk_fname'], 'rb').read()

        # Simulate a TRK file where `count` was not provided.
        count = np.array(0, dtype="int32").tostring()
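        # (`count` is the 4-byte int32 stored 12 bytes before the end of the
        # 1000-byte TRK header, hence the slicing below.)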
        new_trk_file = trk_file[:1000-12] + count + trk_file[1000-8:]
        trk = TrkFile.load(BytesIO(new_trk_file), lazy_load=False)
        assert_tractogram_equal(trk.tractogram, DATA['simple_tractogram'])

        # Simulate a TRK where `vox_to_ras` is not recorded (i.e. all zeros).
        vox_to_ras = np.zeros((4, 4), dtype=np.float32).tostring()
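        # (`vox_to_ras` is a 4x4 float32 matrix, i.e. 64 bytes, at header offset 440.)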
        new_trk_file = trk_file[:440] + vox_to_ras + trk_file[440+64:]
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            trk = TrkFile.load(BytesIO(new_trk_file))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("identity" in str(w[0].message))
            assert_array_equal(trk.affine, np.eye(4))

        # Simulate a TRK where `vox_to_ras` is invalid.
        vox_to_ras = np.zeros((4, 4), dtype=np.float32)
        vox_to_ras[3, 3] = 1
        vox_to_ras = vox_to_ras.tostring()
        new_trk_file = trk_file[:440] + vox_to_ras + trk_file[440+64:]
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            assert_raises(HeaderError, TrkFile.load, BytesIO(new_trk_file))

        # Simulate a TRK file where `voxel_order` was not provided.
        voxel_order = np.zeros(1, dtype="|S3").tostring()
        new_trk_file = trk_file[:948] + voxel_order + trk_file[948+3:]
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            TrkFile.load(BytesIO(new_trk_file))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("LPS" in str(w[0].message))

        # Simulate a TRK file with an unsupported version.
        version = np.int32(123).tostring()
        new_trk_file = trk_file[:992] + version + trk_file[992+4:]
        assert_raises(HeaderError, TrkFile.load, BytesIO(new_trk_file))

        # Simulate a TRK file with a wrong hdr_size.
        hdr_size = np.int32(1234).tostring()
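        # (a TRK header is 1000 bytes, so an hdr_size of 1234 should be rejected)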
        new_trk_file = trk_file[:996] + hdr_size + trk_file[996+4:]
        assert_raises(HeaderError, TrkFile.load, BytesIO(new_trk_file))

        # Simulate a TRK file with a wrong scalar_name.
        trk_file = open(DATA['complex_trk_fname'], 'rb').read()
        noise = np.int32(42).tostring()
        new_trk_file = trk_file[:47] + noise + trk_file[47+4:]
        assert_raises(HeaderError, TrkFile.load, BytesIO(new_trk_file))

        # Simulate a TRK file with a wrong property_name.
        noise = np.int32(42).tostring()
        new_trk_file = trk_file[:254] + noise + trk_file[254+4:]
        assert_raises(HeaderError, TrkFile.load, BytesIO(new_trk_file))
Example #2
0
def test_metadata():
    # Test deprecation
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_equal(len(GiftiDataArray().get_metadata()), 0)

    # Test deprecation
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_equal(len(GiftiMetaData().get_metadata()), 0)
Example #3
0
def test_metadata():
    # Test deprecation
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_equal(len(GiftiDataArray().get_metadata()), 0)

    # Test deprecation
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_equal(len(GiftiMetaData().get_metadata()), 0)
Example #4
0
def test_to_xml_open_close_deprecations():
    # Smoke test on deprecated functions
    da = GiftiDataArray(np.ones((1,)), 'triangle')
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        assert_true(isinstance(da.to_xml_open(), string_types))
        assert_equal(len(w), 1)
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_true(isinstance(da.to_xml_close(), string_types))
        assert_equal(len(w), 1)
Example #5
0
def test_to_xml_open_close_deprecations():
    # Smoke test on deprecated functions
    da = GiftiDataArray(np.ones((1, )), 'triangle')
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        assert_true(isinstance(da.to_xml_open(), str))
        assert_equal(len(w), 1)
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_true(isinstance(da.to_xml_close(), str))
        assert_equal(len(w), 1)
Example #6
0
    def test_load_file_with_wrong_information(self):
        tck_file = open(DATA['simple_tck_fname'], 'rb').read()

        # Simulate a TCK file where `datatype` has the wrong endianness.
        new_tck_file = tck_file.replace(asbytes("Float32LE"),
                                        asbytes("Float32BE"))
        assert_raises(DataError, TckFile.load, BytesIO(new_tck_file))

        # Simulate a TCK file with unsupported `datatype`.
        new_tck_file = tck_file.replace(asbytes("Float32LE"),
                                        asbytes("int32"))
        assert_raises(HeaderError, TckFile.load, BytesIO(new_tck_file))

        # Simulate a TCK file with no `datatype` field.
        new_tck_file = tck_file.replace(b"datatype: Float32LE\n", b"")
        # Need to adjust data offset.
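        # (the removed b"datatype: Float32LE\n" line is 20 bytes, hence 67 -> 47)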
        new_tck_file = new_tck_file.replace(b"file: . 67\n", b"file: . 47\n")
        with clear_and_catch_warnings(record=True, modules=[tck_module]) as w:
            tck = TckFile.load(BytesIO(new_tck_file))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("Missing 'datatype'" in str(w[0].message))
            assert_array_equal(tck.header['datatype'], "Float32LE")

        # Simulate a TCK file with no `file` field.
        new_tck_file = tck_file.replace(b"\nfile: . 67", b"")
        with clear_and_catch_warnings(record=True, modules=[tck_module]) as w:
            tck = TckFile.load(BytesIO(new_tck_file))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("Missing 'file'" in str(w[0].message))
            assert_array_equal(tck.header['file'], ". 56")

        # Simulate a TCK file with `file` field pointing to another file.
        new_tck_file = tck_file.replace(b"file: . 67\n",
                                        b"file: dummy.mat 75\n")
        assert_raises(HeaderError, TckFile.load, BytesIO(new_tck_file))

        # Simulate a TCK file which is missing a streamline delimiter.
        eos = TckFile.FIBER_DELIMITER.tostring()
        eof = TckFile.EOF_DELIMITER.tostring()
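        # Strip the last fiber delimiter from the end but keep the EOF delimiter.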
        new_tck_file = tck_file[:-(len(eos) + len(eof))] + tck_file[-len(eof):]

        # Force TCK loading to use buffering.
        buffer_size = 1. / 1024**2  # 1 byte
        hdr = TckFile._read_header(BytesIO(new_tck_file))
        tck_reader = TckFile._read(BytesIO(new_tck_file), hdr, buffer_size)
        assert_raises(DataError, list, tck_reader)

        # Simulate a TCK file which is missing the end-of-file delimiter.
        new_tck_file = tck_file[:-len(eof)]
        assert_raises(DataError, TckFile.load, BytesIO(new_tck_file))
Example #7
0
    def test_load_file_with_wrong_information(self):
        # Simulate a TRK file where `count` was not provided.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.NB_STREAMLINES] = 0
        trk = TrkFile.load(BytesIO(trk_bytes), lazy_load=False)
        assert_tractogram_equal(trk.tractogram, DATA['simple_tractogram'])

        # Simulate a TRK where `vox_to_ras` is not recorded (i.e. all zeros).
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.VOXEL_TO_RASMM] = np.zeros((4, 4))
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            trk = TrkFile.load(BytesIO(trk_bytes))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("identity" in str(w[0].message))
            assert_array_equal(trk.affine, np.eye(4))

        # Simulate a TRK where `vox_to_ras` is invalid.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.VOXEL_TO_RASMM] = np.diag([0, 0, 0, 1])
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))

        # Simulate a TRK file where `voxel_order` was not provided.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.VOXEL_ORDER] = b''
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            TrkFile.load(BytesIO(trk_bytes))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("LPS" in str(w[0].message))

        # Simulate a TRK file with an unsupported version.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct['version'] = 123
        assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))

        # Simulate a TRK file with a wrong hdr_size.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct['hdr_size'] = 1234
        assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))

        # Simulate a TRK file with a wrong scalar_name.
        trk_struct, trk_bytes = self.trk_with_bytes('complex_trk_fname')
        trk_struct['scalar_name'][0, 0] = b'colors\x003\x004'
        assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))

        # Simulate a TRK file with a wrong property_name.
        trk_struct, trk_bytes = self.trk_with_bytes('complex_trk_fname')
        trk_struct['property_name'][0, 0] = b'colors\x003\x004'
        assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))
Example #8
0
def test_gifti_label_rgba():
    rgba = np.random.rand(4)
    kwargs = dict(zip(['red', 'green', 'blue', 'alpha'], rgba))

    gl1 = GiftiLabel(**kwargs)
    assert_array_equal(rgba, gl1.rgba)

    gl1.red = 2 * gl1.red
    assert_false(np.allclose(rgba, gl1.rgba))  # don't just store the list!

    gl2 = GiftiLabel()
    gl2.rgba = rgba
    assert_array_equal(rgba, gl2.rgba)

    gl2.blue = 2 * gl2.blue
    assert_false(np.allclose(rgba, gl2.rgba))  # don't just store the list!

    def assign_rgba(gl, val):
        gl.rgba = val

    gl3 = GiftiLabel(**kwargs)
    assert_raises(ValueError, assign_rgba, gl3, rgba[:2])
    assert_raises(ValueError, assign_rgba, gl3, rgba.tolist() + rgba.tolist())

    # Test deprecation
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_equal(kwargs['red'], gl3.get_rgba()[0])
        assert_equal(len(w), 1)

    # Test default value
    gl4 = GiftiLabel()
    assert_equal(len(gl4.rgba), 4)
    assert_true(np.all([elem is None for elem in gl4.rgba]))
Example #9
0
def test_gifti_label_rgba():
    rgba = np.random.rand(4)
    kwargs = dict(zip(['red', 'green', 'blue', 'alpha'], rgba))

    gl1 = GiftiLabel(**kwargs)
    assert_array_equal(rgba, gl1.rgba)

    gl1.red = 2 * gl1.red
    assert_false(np.allclose(rgba, gl1.rgba))  # don't just store the list!

    gl2 = GiftiLabel()
    gl2.rgba = rgba
    assert_array_equal(rgba, gl2.rgba)

    gl2.blue = 2 * gl2.blue
    assert_false(np.allclose(rgba, gl2.rgba))  # don't just store the list!

    def assign_rgba(gl, val):
        gl.rgba = val
    gl3 = GiftiLabel(**kwargs)
    assert_raises(ValueError, assign_rgba, gl3, rgba[:2])
    assert_raises(ValueError, assign_rgba, gl3, rgba.tolist() + rgba.tolist())

    # Test deprecation
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_equal(kwargs['red'], gl3.get_rgba()[0])
        assert_equal(len(w), 1)

    # Test default value
    gl4 = GiftiLabel()
    assert_equal(len(gl4.rgba), 4)
    assert_true(np.all([elem is None for elem in gl4.rgba]))
Example #10
0
    def test_save_complex_file(self):
        complex_tractogram = Tractogram(DATA['streamlines'],
                                        DATA['data_per_streamline'],
                                        DATA['data_per_point'],
                                        affine_to_rasmm=np.eye(4))

        for ext, cls in nib.streamlines.FORMATS.items():
            with InTemporaryDirectory():
                filename = 'streamlines' + ext

                with clear_and_catch_warnings(record=True,
                                              modules=[trk]) as w:
                    nib.streamlines.save(complex_tractogram, filename)

                    # If streamlines format does not support saving data
                    # per point or data per streamline, a warning message
                    # should be issued.
                    if not (cls.SUPPORTS_DATA_PER_POINT and
                            cls.SUPPORTS_DATA_PER_STREAMLINE):
                        assert_equal(len(w), 1)
                        assert_true(issubclass(w[0].category, Warning))

                    tractogram = Tractogram(DATA['streamlines'],
                                            affine_to_rasmm=np.eye(4))

                    if cls.SUPPORTS_DATA_PER_POINT:
                        tractogram.data_per_point = DATA['data_per_point']

                    if cls.SUPPORTS_DATA_PER_STREAMLINE:
                        data = DATA['data_per_streamline']
                        tractogram.data_per_streamline = data

                    tfile = nib.streamlines.load(filename, lazy_load=False)
                    assert_tractogram_equal(tfile.tractogram, tractogram)
Example #11
0
    def test_save_complex_file(self):
        complex_tractogram = Tractogram(DATA['streamlines'],
                                        DATA['data_per_streamline'],
                                        DATA['data_per_point'],
                                        affine_to_rasmm=np.eye(4))

        for ext, cls in nib.streamlines.FORMATS.items():
            with InTemporaryDirectory():
                filename = 'streamlines' + ext

                with clear_and_catch_warnings(record=True, modules=[trk]) as w:
                    nib.streamlines.save(complex_tractogram, filename)

                    # If streamlines format does not support saving data
                    # per point or data per streamline, a warning message
                    # should be issued.
                    if not (cls.SUPPORTS_DATA_PER_POINT
                            and cls.SUPPORTS_DATA_PER_STREAMLINE):
                        assert_equal(len(w), 1)
                        assert_true(issubclass(w[0].category, Warning))

                    tractogram = Tractogram(DATA['streamlines'],
                                            affine_to_rasmm=np.eye(4))

                    if cls.SUPPORTS_DATA_PER_POINT:
                        tractogram.data_per_point = DATA['data_per_point']

                    if cls.SUPPORTS_DATA_PER_STREAMLINE:
                        data = DATA['data_per_streamline']
                        tractogram.data_per_streamline = data

                    tfile = nib.streamlines.load(filename, lazy_load=False)
                    assert_tractogram_equal(tfile.tractogram, tractogram)
Example #12
0
    def test_save_tractogram_file(self):
        tractogram = Tractogram(DATA['streamlines'], affine_to_rasmm=np.eye(4))
        trk_file = trk.TrkFile(tractogram)

        # No need for keyword arguments.
        assert_raises(ValueError,
                      nib.streamlines.save,
                      trk_file,
                      "dummy.trk",
                      header={})

        # Wrong extension.
        with clear_and_catch_warnings(record=True,
                                      modules=[nib.streamlines]) as w:
            trk_file = trk.TrkFile(tractogram)
            assert_raises(ValueError,
                          nib.streamlines.save,
                          trk_file,
                          "dummy.tck",
                          header={})

            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, ExtensionWarning))
            assert_true("extension" in str(w[0].message))

        with InTemporaryDirectory():
            nib.streamlines.save(trk_file, "dummy.trk")
            tfile = nib.streamlines.load("dummy.trk", lazy_load=False)
            assert_tractogram_equal(tfile.tractogram, tractogram)
Example #13
0
def test_deprecations():
    with clear_and_catch_warnings(modules=[imageclasses]) as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        nifti_single = class_map['nifti_single']
        assert nifti_single['class'] == Nifti1Image
        assert len(w) == 1
        nifti_ext = ext_map['.nii']
        assert nifti_ext == 'nifti_single'
        assert len(w) == 2
Example #14
0
def test_deprecations():
    with clear_and_catch_warnings(modules=[imageclasses]) as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        nifti_single = class_map['nifti_single']
        assert_equal(nifti_single['class'], Nifti1Image)
        assert_equal(len(w), 1)
        nifti_ext = ext_map['.nii']
        assert_equal(nifti_ext, 'nifti_single')
        assert_equal(len(w), 2)
Example #15
0
def test_deprecations():
    with clear_and_catch_warnings(modules=[imageclasses]) as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        nifti_single = class_map['nifti_single']
        assert_equal(nifti_single['class'], Nifti1Image)
        assert_equal(len(w), 1)
        nifti_ext = ext_map['.nii']
        assert_equal(nifti_ext, 'nifti_single')
        assert_equal(len(w), 2)
Example #16
0
def test_metadata():
    nvpair = GiftiNVPairs('key', 'value')
    da = GiftiMetaData(nvpair=nvpair)
    assert_equal(da.data[0].name, 'key')
    assert_equal(da.data[0].value, 'value')
    # Test deprecation
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        assert_equal(len(GiftiDataArray().get_metadata()), 0)
        assert_equal(len(w), 1)
Example #17
0
def test_read_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.simplefilter('always', DeprecationWarning)
        from nibabel.gifti.giftiio import read, write

        img = read(DATA_FILE1)
        assert_equal(len(w), 1)
        with InTemporaryDirectory():
            write(img, 'test.gii')
        assert_equal(len(w), 2)
Example #18
0
def test_read_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.simplefilter('always', DeprecationWarning)
        from nibabel.gifti.giftiio import read, write

        img = read(DATA_FILE1)
        assert_equal(len(w), 1)
        with InTemporaryDirectory():
            write(img, 'test.gii')
        assert_equal(len(w), 2)
Example #19
0
def test_metadata():
    nvpair = GiftiNVPairs('key', 'value')
    da = GiftiMetaData(nvpair=nvpair)
    assert_equal(da.data[0].name, 'key')
    assert_equal(da.data[0].value, 'value')
    # Test deprecation
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        assert_equal(len(GiftiDataArray().get_metadata()), 0)
        assert_equal(len(w), 1)
Example #20
0
def test_num_dim_deprecation():
    da = GiftiDataArray(np.ones((2, 3, 4)))
    # num_dim is a property, set automatically from len(da.dims)
    assert_equal(da.num_dim, 3)
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        # Setting num_dim to the correct value is OK, but raises DeprecationWarning
        da.num_dim = 3
        assert_equal(len(w), 1)
        # Any other value gives a ValueError
        assert_raises(ValueError, setattr, da, 'num_dim', 4)
Example #21
0
def test_num_dim_deprecation():
    da = GiftiDataArray(np.ones((2, 3, 4)))
    # num_dim is a property, set automatically from len(da.dims)
    assert_equal(da.num_dim, 3)
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        # Setting num_dim to the correct value is OK, but raises DeprecationWarning
        da.num_dim = 3
        assert_equal(len(w), 1)
        # Any other value gives a ValueError
        assert_raises(ValueError, setattr, da, 'num_dim', 4)
Example #22
0
def test_dataarray():
    for dt_code in data_type_codes.value_set():
        data_type = data_type_codes.type[dt_code]
        if data_type is np.void:  # not supported
            continue
        arr = np.zeros((10, 3), dtype=data_type)
        da = GiftiDataArray.from_array(arr, 'triangle')
        assert_equal(da.datatype, data_type_codes[arr.dtype])
        bs_arr = arr.byteswap().newbyteorder()
        da = GiftiDataArray.from_array(bs_arr, 'triangle')
        assert_equal(da.datatype, data_type_codes[arr.dtype])

    # Smoke test on deprecated functions
    da = GiftiDataArray.from_array(np.ones((1,)), 'triangle')
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        assert_true(isinstance(da.to_xml_open(), string_types))
        assert_equal(len(w), 1)
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_true(isinstance(da.to_xml_close(), string_types))
        assert_equal(len(w), 1)
Example #23
0
def test_dataarray():
    for dt_code in data_type_codes.value_set():
        data_type = data_type_codes.type[dt_code]
        if data_type is np.void:  # not supported
            continue
        arr = np.zeros((10, 3), dtype=data_type)
        da = GiftiDataArray.from_array(arr, 'triangle')
        assert_equal(da.datatype, data_type_codes[arr.dtype])
        bs_arr = arr.byteswap().newbyteorder()
        da = GiftiDataArray.from_array(bs_arr, 'triangle')
        assert_equal(da.datatype, data_type_codes[arr.dtype])

    # Smoke test on deprecated functions
    da = GiftiDataArray.from_array(np.ones((1, )), 'triangle')
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        assert_true(isinstance(da.to_xml_open(), string_types))
        assert_equal(len(w), 1)
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        assert_true(isinstance(da.to_xml_close(), string_types))
        assert_equal(len(w), 1)
Example #24
0
def test_dataarray_from_array():
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        da = GiftiDataArray.from_array(np.ones((3, 4)))
        assert_equal(len(w), 1)
        for dt_code in data_type_codes.value_set():
            data_type = data_type_codes.type[dt_code]
            if data_type is np.void:  # not supported
                continue
            arr = np.zeros((10, 3), dtype=data_type)
            da = GiftiDataArray.from_array(arr, 'triangle')
            assert_equal(da.datatype, data_type_codes[arr.dtype])
            bs_arr = arr.byteswap().newbyteorder()
            da = GiftiDataArray.from_array(bs_arr, 'triangle')
            assert_equal(da.datatype, data_type_codes[arr.dtype])
Example #25
0
    def test_lazy_tractogram_len(self):
        modules = [module_tractogram]  # Modules for which to catch warnings.
        with clear_and_catch_warnings(record=True, modules=modules) as w:
            warnings.simplefilter("always")  # Always trigger warnings.

            # Calling `len` will create new generators each time.
            tractogram = LazyTractogram(DATA['streamlines_func'])
            assert_true(tractogram._nb_streamlines is None)

            # This should produce a warning message.
            assert_equal(len(tractogram), len(DATA['streamlines']))
            assert_equal(tractogram._nb_streamlines, len(DATA['streamlines']))
            assert_equal(len(w), 1)

            tractogram = LazyTractogram(DATA['streamlines_func'])

            # New instances should still produce a warning message.
            assert_equal(len(tractogram), len(DATA['streamlines']))
            assert_equal(len(w), 2)
            assert_true(issubclass(w[-1].category, Warning))

            # Calling `len` again should *not* produce a warning.
            assert_equal(len(tractogram), len(DATA['streamlines']))
            assert_equal(len(w), 2)

        with clear_and_catch_warnings(record=True, modules=modules) as w:
            # Once we have iterated through the tractogram, we know its length.

            tractogram = LazyTractogram(DATA['streamlines_func'])

            assert_true(tractogram._nb_streamlines is None)
            [t for t in tractogram]  # Force iteration through tractogram.
            assert_equal(tractogram._nb_streamlines, len(DATA['streamlines']))
            # This should *not* produce a warning.
            assert_equal(len(tractogram), len(DATA['streamlines']))
            assert_equal(len(w), 0)
Example #26
0
def test_dataarray_from_array():
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        da = GiftiDataArray.from_array(np.ones((3, 4)))
        assert_equal(len(w), 1)
        for dt_code in data_type_codes.value_set():
            data_type = data_type_codes.type[dt_code]
            if data_type is np.void:  # not supported
                continue
            arr = np.zeros((10, 3), dtype=data_type)
            da = GiftiDataArray.from_array(arr, 'triangle')
            assert_equal(da.datatype, data_type_codes[arr.dtype])
            bs_arr = arr.byteswap().newbyteorder()
            da = GiftiDataArray.from_array(bs_arr, 'triangle')
            assert_equal(da.datatype, data_type_codes[arr.dtype])
Example #27
0
    def test_lazy_tractogram_len(self):
        modules = [module_tractogram]  # Modules for which to catch warnings.
        with clear_and_catch_warnings(record=True, modules=modules) as w:
            warnings.simplefilter("always")  # Always trigger warnings.

            # Calling `len` will create new generators each time.
            tractogram = LazyTractogram(DATA['streamlines_func'])
            assert_true(tractogram._nb_streamlines is None)

            # This should produce a warning message.
            assert_equal(len(tractogram), len(DATA['streamlines']))
            assert_equal(tractogram._nb_streamlines, len(DATA['streamlines']))
            assert_equal(len(w), 1)

            tractogram = LazyTractogram(DATA['streamlines_func'])

            # New instances should still produce a warning message.
            assert_equal(len(tractogram), len(DATA['streamlines']))
            assert_equal(len(w), 2)
            assert_true(issubclass(w[-1].category, Warning))

            # Calling `len` again should *not* produce a warning.
            assert_equal(len(tractogram), len(DATA['streamlines']))
            assert_equal(len(w), 2)

        with clear_and_catch_warnings(record=True, modules=modules) as w:
            # Once we have iterated through the tractogram, we know its length.

            tractogram = LazyTractogram(DATA['streamlines_func'])

            assert_true(tractogram._nb_streamlines is None)
            [t for t in tractogram]  # Force iteration through tractogram.
            assert_equal(tractogram._nb_streamlines, len(DATA['streamlines']))
            # This should *not* produce a warning.
            assert_equal(len(tractogram), len(DATA['streamlines']))
            assert_equal(len(w), 0)
Example #28
0
    def test_load_trk_version_1(self):
        # Simulate and test a TRK (version 1).
        # First check that setting the RAS affine works in version 2.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.VOXEL_TO_RASMM] = np.diag([2, 3, 4, 1])
        trk = TrkFile.load(BytesIO(trk_bytes))
        assert_array_equal(trk.affine, np.diag([2, 3, 4, 1]))
        # Next check that the affine is assumed to be identity for version 1.
        trk_struct['version'] = 1
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            trk = TrkFile.load(BytesIO(trk_bytes))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("identity" in str(w[0].message))
            assert_array_equal(trk.affine, np.eye(4))
            assert_array_equal(trk.header['version'], 1)
Example #29
0
    def validate_filenames(self, imaker, params):
        # Validate the filename, file_map interface

        if not self.can_save:
            raise unittest.SkipTest
        img = imaker()
        img.set_data_dtype(np.float32)  # to avoid rounding in load / save
        # Make sure the object does not have a file_map
        img.file_map = None
        # The bytesio_round_trip helper tests bytesio load / save via file_map
        rt_img = bytesio_round_trip(img)
        assert_array_equal(img.shape, rt_img.shape)
        assert_almost_equal(img.get_fdata(), rt_img.get_fdata())
        assert_almost_equal(np.asanyarray(img.dataobj),
                            np.asanyarray(rt_img.dataobj))
        # Give the image a file map
        klass = type(img)
        rt_img.file_map = bytesio_filemap(klass)
        # This object can now be saved and loaded from its own file_map
        rt_img.to_file_map()
        rt_rt_img = klass.from_file_map(rt_img.file_map)
        assert_almost_equal(img.get_fdata(), rt_rt_img.get_fdata())
        assert_almost_equal(np.asanyarray(img.dataobj),
                            np.asanyarray(rt_img.dataobj))
        # get_ / set_ filename
        fname = 'an_image' + self.standard_extension
        for path in (fname, pathlib.Path(fname)):
            img.set_filename(path)
            assert img.get_filename() == str(path)
            assert img.file_map['image'].filename == str(path)
        # to_ / from_ filename
        fname = 'another_image' + self.standard_extension
        for path in (fname, pathlib.Path(fname)):
            with InTemporaryDirectory():
                # Validate that saving or loading a file doesn't use deprecated methods internally
                with clear_and_catch_warnings() as w:
                    warnings.filterwarnings('error',
                                            category=DeprecationWarning,
                                            module=r"nibabel.*")
                    img.to_filename(path)
                    rt_img = img.__class__.from_filename(path)
                assert_array_equal(img.shape, rt_img.shape)
                assert_almost_equal(img.get_fdata(), rt_img.get_fdata())
                assert_almost_equal(np.asanyarray(img.dataobj),
                                    np.asanyarray(rt_img.dataobj))
                del rt_img  # to allow windows to delete the directory
Example #30
0
def test_labeltable():
    img = GiftiImage()
    assert_equal(len(img.labeltable.labels), 0)

    new_table = GiftiLabelTable()
    new_table.labels += ['test', 'me']
    img.labeltable = new_table
    assert_equal(len(img.labeltable.labels), 2)

    # Test deprecations
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        newer_table = GiftiLabelTable()
        newer_table.labels += ['test', 'me', 'again']
        img.set_labeltable(newer_table)
        assert_equal(len(w), 1)
        assert_equal(len(img.get_labeltable().labels), 3)
        assert_equal(len(w), 2)
Example #31
0
def test_labeltable():
    img = GiftiImage()
    assert_equal(len(img.labeltable.labels), 0)

    new_table = GiftiLabelTable()
    new_table.labels += ['test', 'me']
    img.labeltable = new_table
    assert_equal(len(img.labeltable.labels), 2)

    # Test deprecations
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        newer_table = GiftiLabelTable()
        newer_table.labels += ['test', 'me', 'again']
        img.set_labeltable(newer_table)
        assert_equal(len(w), 1)
        assert_equal(len(img.get_labeltable().labels), 3)
        assert_equal(len(w), 2)
Example #32
0
def test_wrong_shape():
    scalar_map = create_scalar_map((0, ))
    brain_model_map = create_geometry_map((1, ))

    matrix = ci.Cifti2Matrix()
    matrix.append(scalar_map)
    matrix.append(brain_model_map)
    hdr = ci.Cifti2Header(matrix)

    # correct shape is (2, 10)
    for data in (
            np.random.randn(1, 11),
            np.random.randn(2, 10, 1),
            np.random.randn(1, 2, 10),
            np.random.randn(3, 10),
            np.random.randn(2, 9),
    ):
        with clear_and_catch_warnings():
            with error_warnings():
                assert_raises(UserWarning, ci.Cifti2Image, data, hdr)
        with suppress_warnings():
            img = ci.Cifti2Image(data, hdr)
        assert_raises(ValueError, img.to_file_map)
Example #33
0
    def test_save_tractogram_file(self):
        tractogram = Tractogram(DATA['streamlines'],
                                affine_to_rasmm=np.eye(4))
        trk_file = trk.TrkFile(tractogram)

        # No need for keyword arguments.
        assert_raises(ValueError, nib.streamlines.save,
                      trk_file, "dummy.trk", header={})

        # Wrong extension.
        with clear_and_catch_warnings(record=True,
                                      modules=[nib.streamlines]) as w:
            trk_file = trk.TrkFile(tractogram)
            assert_raises(ValueError, nib.streamlines.save,
                          trk_file, "dummy.tck", header={})

            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, ExtensionWarning))
            assert_true("extension" in str(w[0].message))

        with InTemporaryDirectory():
            nib.streamlines.save(trk_file, "dummy.trk")
            tfile = nib.streamlines.load("dummy.trk", lazy_load=False)
            assert_tractogram_equal(tfile.tractogram, tractogram)
Example #34
0
def test_data_tag_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        data_tag(np.array([]), 'ASCII', '%i', 1)
        assert_equal(len(w), 1)
Example #35
0
    def setUp(self):
        with clear_and_catch_warnings() as w:
            warnings.simplefilter('always', DeprecationWarning)
            assert_equal(len(w), 1)
Example #36
0
    def test_load_file_with_wrong_information(self):
        # Simulate a TRK file where `voxel_order` is lowercase.
        trk_struct1, trk_bytes1 = self.trk_with_bytes()
        trk_struct1[Field.VOXEL_ORDER] = b'LAS'
        trk1 = TrkFile.load(BytesIO(trk_bytes1))
        trk_struct2, trk_bytes2 = self.trk_with_bytes()
        trk_struct2[Field.VOXEL_ORDER] = b'las'
        trk2 = TrkFile.load(BytesIO(trk_bytes2))
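        # The case of `voxel_order` should not affect the computed affine.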
        trk1_aff2rasmm = get_affine_trackvis_to_rasmm(trk1.header)
        trk2_aff2rasmm = get_affine_trackvis_to_rasmm(trk2.header)
        assert_array_equal(trk1_aff2rasmm, trk2_aff2rasmm)

        # Simulate a TRK file where `count` was not provided.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.NB_STREAMLINES] = 0
        trk = TrkFile.load(BytesIO(trk_bytes), lazy_load=False)
        assert_tractogram_equal(trk.tractogram, DATA['simple_tractogram'])

        # Simulate a TRK where `vox_to_ras` is not recorded (i.e. all zeros).
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.VOXEL_TO_RASMM] = np.zeros((4, 4))
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            trk = TrkFile.load(BytesIO(trk_bytes))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("identity" in str(w[0].message))
            assert_array_equal(trk.affine, np.eye(4))

        # Simulate a TRK where `vox_to_ras` is invalid.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.VOXEL_TO_RASMM] = np.diag([0, 0, 0, 1])
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))

        # Simulate a TRK file where `voxel_order` was not provided.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct[Field.VOXEL_ORDER] = b''
        with clear_and_catch_warnings(record=True, modules=[trk_module]) as w:
            TrkFile.load(BytesIO(trk_bytes))
            assert_equal(len(w), 1)
            assert_true(issubclass(w[0].category, HeaderWarning))
            assert_true("LPS" in str(w[0].message))

        # Simulate a TRK file with an unsupported version.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct['version'] = 123
        assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))

        # Simulate a TRK file with a wrong hdr_size.
        trk_struct, trk_bytes = self.trk_with_bytes()
        trk_struct['hdr_size'] = 1234
        assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))

        # Simulate a TRK file with a wrong scalar_name.
        trk_struct, trk_bytes = self.trk_with_bytes('complex_trk_fname')
        trk_struct['scalar_name'][0, 0] = b'colors\x003\x004'
        assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))

        # Simulate a TRK file with a wrong property_name.
        trk_struct, trk_bytes = self.trk_with_bytes('complex_trk_fname')
        trk_struct['property_name'][0, 0] = b'colors\x003\x004'
        assert_raises(HeaderError, TrkFile.load, BytesIO(trk_bytes))
Example #37
0
def test_data_tag_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.filterwarnings('once', category=DeprecationWarning)
        data_tag(np.array([]), 'ASCII', '%i', 1)
        assert_equal(len(w), 1)
Example #38
0
    def setUp(self):
        with clear_and_catch_warnings() as w:
            warnings.simplefilter('always', DeprecationWarning)
            assert_equal(len(w), 1)