def test_pconnscalar():
    parcel_map = create_parcel_map((0, 1))
    scalar_map = create_scalar_map((2, ))
    matrix = ci.Cifti2Matrix()
    matrix.append(parcel_map)
    matrix.append(scalar_map)
    hdr = ci.Cifti2Header(matrix)
    data = np.random.randn(3, 3, 13)
    img = ci.Cifti2Image(data, hdr)
    img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED_'
                                'PARCELLATED_SCALAR')

    with InTemporaryDirectory():
        ci.save(img, 'test.pconnscalar.nii')
        img2 = ci.load('test.pconnscalar.nii')
        assert_equal(img.nifti_header.get_intent()[0], 'ConnPPSc')
        assert_true(isinstance(img2, ci.Cifti2Image))
        assert_true((img2.get_data() == data).all())
        assert_equal(img2.header.matrix.get_index_map(0),
                     img2.header.matrix.get_index_map(1))

        check_parcel_map(img2.header.matrix.get_index_map(0))
        check_scalar_map(img2.header.matrix.get_index_map(2))
        del img2
def run_from_args(args):
    """
    Runs the script based on a Namespace containing the command line arguments
    """
    logger.info('starting %s', op.basename(__file__))
    dconn1 = cifti2.load(args.input)
    # Cifti2Header.get_axis expects a dimension index, so collect one axis per dimension
    axes1 = tuple(dconn1.header.get_axis(idx) for idx in range(dconn1.ndim))
    dconn2 = cifti2.load(args.reference)
    axes2 = tuple(dconn2.header.get_axis(idx) for idx in range(dconn2.ndim))
    if not isinstance(axes1[1], (cifti2.BrainModelAxis, cifti2.ParcelsAxis)):
        raise ValueError("Columns should have greyordinates or parcels")
    if axes1[1] != axes2[1]:
        raise ValueError(
            "Compared CIFTI files should have the same greyordinates/parcels along the columns"
        )
    if args.as_dconn:
        if isinstance(axes1[0], cifti2.SeriesAxis):
            axes1 = (axes1[1], ) * 2
            dconn1 = FakeDConn(dconn1)
        elif isinstance(axes2[0], cifti2.SeriesAxis):
            axes2 = (axes2[1], ) * 2
            dconn2 = FakeDConn(dconn2)
    if args.split_dconn and not isinstance(axes1[0], cifti2.BrainModelAxis):
        raise ValueError(
            "Rows should be greyordinates when using the option --split_dconn")
    if axes1[0] != axes2[0]:
        raise ValueError(
            "Compared CIFTI files should have the same features along the rows"
        )
    as_dict = run(
        dconn1=dconn1,
        dconn2=dconn2,
        split_greyordinates=axes1[0] if args.split_dconn else None,
    )
    scalar = cifti2.ScalarAxis(list(as_dict.keys()))
    arr = np.stack(list(as_dict.values()), -1)
    cifti2.Cifti2Image(arr.T, header=(scalar, axes1[1])).to_filename(args.output)
    logger.info('ending %s', op.basename(__file__))
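# A minimal sketch (assumed wiring, not the original parser) of how run_from_args
# could be driven from the command line; the attribute names below are the only
# ones run_from_args actually reads (input, reference, output, as_dconn, split_dconn).
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Compare two CIFTI connectomes")
    parser.add_argument("input", help="CIFTI file to evaluate")
    parser.add_argument("reference", help="CIFTI file to compare against")
    parser.add_argument("output", help="filename for the output comparison CIFTI")
    parser.add_argument("--as_dconn", action="store_true",
                        help="turn a dense timeseries into a dense connectome before comparing")
    parser.add_argument("--split_dconn", action="store_true",
                        help="report the comparison per row greyordinate")
    run_from_args(parser.parse_args())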
def from_cifti(filename: str, basename=''):
    """
    Converts a CIFTI file into a pandas dataframe

    :param filename: input dense CIFTI file
    :param basename: basename of the dataframe columns
    :return: pandas dataframe with any voxels/vertices in the dense input
    """
    img = cifti2.load(filename)
    arr = np.asarray(img.dataobj)
    axis = img.header.get_axis(0)
    bm = img.header.get_axis(1)
    if not isinstance(bm, cifti2.BrainModelAxis):
        raise ValueError(f'Input CIFTI file {filename} is not dense')
    as_dict = {
        ('voxel', 'i'): bm.voxel[:, 0],
        ('voxel', 'j'): bm.voxel[:, 1],
        ('voxel', 'k'): bm.voxel[:, 2],
        ('vertex', ''): bm.vertex,
        ('structure', 'hemisphere'): [BrainStructure.from_string(name).hemisphere for name in bm.name],
        ('structure', 'region'): [BrainStructure.from_string(name).primary for name in bm.name],
        ('structure', 'cifti_label'): bm.name,
    }
    if isinstance(axis, cifti2.ScalarAxis):
        for sub_arr, name in zip(arr, axis.name):
            if len(axis) == 1:
                name = ''
            as_dict[(basename, name)] = sub_arr
    elif isinstance(axis, cifti2.SeriesAxis):
        for sub_arr, name in zip(arr, np.arange(len(axis))):
            as_dict[(basename, name)] = sub_arr
    elif isinstance(axis, cifti2.LabelAxis):
        for sub_arr, name, mapping in zip(arr, axis.name, axis.label):
            if len(axis) == 1:
                name = ''
            # map each integer label key to its name (the mapping value is a (name, colour) tuple)
            label_names = {key: value[0] for key, value in mapping.items()}
            as_dict[(basename, name)] = [label_names[int(key)] for key in sub_arr]
    elif isinstance(axis, cifti2.ParcelsAxis):
        for sub_arr, name in zip(arr, axis.name):
            as_dict[(basename, name)] = sub_arr
    df = pd.DataFrame.from_dict(as_dict)
    df.columns = pd.MultiIndex.from_tuples(df.columns)
    return df
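# A short usage sketch for from_cifti (the filename and basename are hypothetical):
# the returned dataframe has a two-level column index, so greyordinate metadata and
# the data columns can be selected by tuple or by top-level name.
df = from_cifti('thickness.dscalar.nii', basename='thickness')

# Two-level column index: voxel/vertex coordinates, structure metadata, plus one column per map
print(df.columns.tolist())
print(df[('structure', 'cifti_label')].value_counts())
print(df['thickness'].describe())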
def calc_group_correlation(df, roi):
    """
    Group average of correlation map

    Parameters
    ----------
    df : pandas dataframe
        REQUIRED COLUMNS
        FilePath - lists file path to dtseries files
        Identifier - unique identifier (used for individual connectivity map naming)
    roi : str
        Path to roi dscalar file.

    Returns
    -------
    m1 : np.ndarray (n_subjects, n_greyordinates)
        Method 1: correlation of each greyordinate timeseries with the mean ROI timeseries.
    m2 : np.ndarray (n_subjects, n_greyordinates)
        Method 2: mean of the correlations with each individual ROI greyordinate timeseries.
    """
    rcii = ci.load(roi)
    rdata = np.squeeze(rcii.get_fdata()) != 0

    N = df.shape[0]
    m1 = []
    m2 = []
    for i, row in df.iterrows():
        print("Working on image {} of {}: {}".format(i + 1, N, row.FilePath))
        dcii = ci.load(row.FilePath)
        ts = dcii.get_fdata()
        ts_roi = ts[:, rdata]
        m1.append(np.squeeze(MH.pearson_2d(ts.T, ts_roi.T.mean(0, keepdims=True))))
        m2.append(MH.pearson_2d(ts.T, ts_roi.T).mean(1))

    return np.array(m1), np.array(m2)
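# Usage sketch for calc_group_correlation (file names are hypothetical; running it
# needs real dtseries and ROI dscalar files). The dataframe only needs the FilePath
# and Identifier columns described in the docstring.
df = pd.DataFrame({
    'FilePath': ['sub-01.dtseries.nii', 'sub-02.dtseries.nii'],
    'Identifier': ['sub-01', 'sub-02'],
})
m1, m2 = calc_group_correlation(df, 'seed_roi.dscalar.nii')

# One row per subject, one column per greyordinate; average over subjects for a group map
group_m1 = m1.mean(axis=0)
group_m2 = m2.mean(axis=0)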
def test_readwritedata():
    with InTemporaryDirectory():
        for name in datafiles:
            img = ci.load(name)
            ci.save(img, 'test.nii')
            img2 = ci.load('test.nii')
            assert len(img.header.matrix) == len(img2.header.matrix)
            # Order should be preserved in load/save
            for mim1, mim2 in zip(img.header.matrix, img2.header.matrix):
                named_maps1 = [m_ for m_ in mim1
                               if isinstance(m_, ci.Cifti2NamedMap)]
                named_maps2 = [m_ for m_ in mim2
                               if isinstance(m_, ci.Cifti2NamedMap)]
                assert len(named_maps1) == len(named_maps2)
                for map1, map2 in zip(named_maps1, named_maps2):
                    assert map1.map_name == map2.map_name
                    if map1.label_table is None:
                        assert map2.label_table is None
                    else:
                        assert len(map1.label_table) == len(map2.label_table)
            assert_array_almost_equal(img.dataobj, img2.dataobj)
def test_pconn():
    mapping = create_parcel_map((0, 1))
    matrix = ci.Cifti2Matrix()
    matrix.append(mapping)
    hdr = ci.Cifti2Header(matrix)
    data = np.random.randn(3, 3)
    img = ci.Cifti2Image(data, hdr)

    with InTemporaryDirectory():
        ci.save(img, 'test.pconn.nii')
        img2 = ci.load('test.pconn.nii')
        assert_true((img2.get_data() == data).all())
        assert_equal(img2.header.matrix.get_index_map(0),
                     img2.header.matrix.get_index_map(1))

        check_parcel_map(img2.header.matrix.get_index_map(0))
        del img2
def test_dscalar():
    scalar_map = create_scalar_map((0, ))
    geometry_map = create_geometry_map((1, ))
    matrix = ci.Cifti2Matrix()
    matrix.append(scalar_map)
    matrix.append(geometry_map)
    hdr = ci.Cifti2Header(matrix)
    data = np.random.randn(2, 9)
    img = ci.Cifti2Image(data, hdr)

    with InTemporaryDirectory():
        ci.save(img, 'test.dscalar.nii')
        img2 = ci.load('test.dscalar.nii')
        assert_true((img2.get_data() == data).all())
        check_scalar_map(img2.header.matrix.get_index_map(0))
        check_geometry_map(img2.header.matrix.get_index_map(1))
        del img2
def test_plabel():
    label_map = create_label_map((0, ))
    parcel_map = create_parcel_map((1, ))
    matrix = ci.Cifti2Matrix()
    matrix.append(label_map)
    matrix.append(parcel_map)
    hdr = ci.Cifti2Header(matrix)
    data = np.random.randn(2, 3)
    img = ci.Cifti2Image(data, hdr)

    with InTemporaryDirectory():
        ci.save(img, 'test.plabel.nii')
        img2 = ci.load('test.plabel.nii')
        assert_equal(img.nifti_header.get_intent()[0], 'ConnUnknown')
        assert_true(isinstance(img2, ci.Cifti2Image))
        assert_true((img2.get_data() == data).all())
        check_label_map(img2.header.matrix.get_index_map(0))
        check_parcel_map(img2.header.matrix.get_index_map(1))
        del img2
def test_pconn():
    mapping = create_parcel_map((0, 1))
    matrix = ci.Cifti2Matrix()
    matrix.append(mapping)
    hdr = ci.Cifti2Header(matrix)
    data = np.random.randn(4, 4)
    img = ci.Cifti2Image(data, hdr)
    img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED')

    with InTemporaryDirectory():
        ci.save(img, 'test.pconn.nii')
        img2 = ci.load('test.pconn.nii')
        assert img.nifti_header.get_intent()[0] == 'ConnParcels'
        assert isinstance(img2, ci.Cifti2Image)
        assert_array_equal(img2.get_fdata(), data)
        assert (img2.header.matrix.get_index_map(0) ==
                img2.header.matrix.get_index_map(1))

        check_parcel_map(img2.header.matrix.get_index_map(0))
        del img2
def test_plabel():
    label_map = create_label_map((0, ))
    parcel_map = create_parcel_map((1, ))
    matrix = ci.Cifti2Matrix()
    matrix.append(label_map)
    matrix.append(parcel_map)
    hdr = ci.Cifti2Header(matrix)
    data = np.random.randn(2, 4)
    img = ci.Cifti2Image(data, hdr)

    with InTemporaryDirectory():
        ci.save(img, 'test.plabel.nii')
        img2 = ci.load('test.plabel.nii')
        assert img.nifti_header.get_intent()[0] == 'ConnUnknown'
        assert isinstance(img2, ci.Cifti2Image)
        assert_array_equal(img2.get_fdata(), data)
        check_label_map(img2.header.matrix.get_index_map(0))
        check_parcel_map(img2.header.matrix.get_index_map(1))
        del img2
def test_dpconn():
    parcel_map = create_parcel_map((0, ))
    geometry_map = create_geometry_map((1, ))
    matrix = ci.Cifti2Matrix()
    matrix.append(parcel_map)
    matrix.append(geometry_map)
    hdr = ci.Cifti2Header(matrix)
    data = np.random.randn(2, 3)
    img = ci.Cifti2Image(data, hdr)
    img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_DENSE_PARCELLATED')

    with InTemporaryDirectory():
        ci.save(img, 'test.dpconn.nii')
        img2 = ci.load('test.dpconn.nii')
        assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseParcel')
        assert_true(isinstance(img2, ci.Cifti2Image))
        assert_true((img2.get_data() == data).all())
        check_parcel_map(img2.header.matrix.get_index_map(0))
        check_geometry_map(img2.header.matrix.get_index_map(1))
        del img2
def test_pdconn():
    geometry_map = create_geometry_map((0, ))
    parcel_map = create_parcel_map((1, ))
    matrix = ci.Cifti2Matrix()
    matrix.append(geometry_map)
    matrix.append(parcel_map)
    hdr = ci.Cifti2Header(matrix)
    data = np.random.randn(10, 4)
    img = ci.Cifti2Image(data, hdr)
    img.nifti_header.set_intent('NIFTI_INTENT_CONNECTIVITY_PARCELLATED_DENSE')

    with InTemporaryDirectory():
        ci.save(img, 'test.pdconn.nii')
        img2 = ci.load('test.pdconn.nii')
        assert img2.nifti_header.get_intent()[0] == 'ConnParcelDense'
        assert isinstance(img2, ci.Cifti2Image)
        assert_array_equal(img2.get_fdata(), data)
        check_geometry_map(img2.header.matrix.get_index_map(0))
        check_parcel_map(img2.header.matrix.get_index_map(1))
        del img2
def create_pscalar_from_dlabel(dlabel, data, name_infos):
    """Create a pscalar from a dlabel file with the given data"""
    pinfo = []
    d_cii = ci.load(dlabel)
    labels = d_cii.header.matrix.get_index_map(0)
    brain_models = d_cii.header.matrix.get_index_map(1)
    for element in brain_models:
        if isinstance(element, ci.Cifti2BrainModel):
            if element.model_type == ModelType.SURFACE:
                pinfo.append(
                    ParcelInfo(name=element.brain_structure,
                               surfaces=SurfaceInfo(element.brain_structure,
                                                    element.vertex_indices),
                               voxel_ijk=None))
            if element.model_type == ModelType.VOXEL:
                pinfo.append(
                    ParcelInfo(name=element.brain_structure,
                               surfaces=None,
                               voxel_ijk=element.voxel_indices_ijk))
def main():
    args = parse_arguments()

    if args.t:
        print(
            "roi : {}\n".format(args.roi) +
            "input : {}\n".format(args.input) +
            "prefix : {}\n".format(args.prefix)
        )
        return 0

    # do file checking here
    if not os.path.isfile(args.roi):
        print("roi file does not exist: {}".format(args.roi))
        return -1

    df = pd.read_csv(args.input)
    for i in df.FilePath:
        if not os.path.isfile(i):
            print("input file does not exist: {}".format(i))
            return -1

    out_dir = os.path.dirname(args.prefix)
    if len(out_dir) == 0:
        out_dir = "./"
    out_base = os.path.basename(args.prefix)
    if not os.path.isdir(out_dir):
        print("out directory does not exist: {}".format(out_dir))
        return -1

    m1, m2 = calc_group_correlation(df, args.roi)

    rcii = ci.load(args.roi)
    out_m1 = CH.create_dscalar_from_template(rcii, m1, df.Identifier)
    out_m2 = CH.create_dscalar_from_template(rcii, m2, df.Identifier)
    ci.save(out_m1, args.prefix + "_m1.dscalar.nii")
    ci.save(out_m2, args.prefix + "_m2.dscalar.nii")

    return 0
def test_cifti2types():
    """Check that we instantiate Cifti2 classes correctly, and that our
    test files exercise all classes"""

    counter = {ci.Cifti2LabelTable: 0,
               ci.Cifti2Label: 0,
               ci.Cifti2NamedMap: 0,
               ci.Cifti2Surface: 0,
               ci.Cifti2VoxelIndicesIJK: 0,
               ci.Cifti2Vertices: 0,
               ci.Cifti2Parcel: 0,
               ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ: 0,
               ci.Cifti2Volume: 0,
               ci.Cifti2VertexIndices: 0,
               ci.Cifti2BrainModel: 0,
               ci.Cifti2MatrixIndicesMap: 0,
               }

    for name in datafiles:
        hdr = ci.load(name).header
        # Matrix and MetaData aren't conditional, so don't bother counting
        assert_true(isinstance(hdr.matrix, ci.Cifti2Matrix))
        assert_true(isinstance(hdr.matrix.metadata, ci.Cifti2MetaData))

        for mim in hdr.matrix:
            assert_true(isinstance(mim, ci.Cifti2MatrixIndicesMap))
            counter[ci.Cifti2MatrixIndicesMap] += 1
            for map_ in mim:
                print(map_)
                if isinstance(map_, ci.Cifti2BrainModel):
                    counter[ci.Cifti2BrainModel] += 1
                    if isinstance(map_.vertex_indices, ci.Cifti2VertexIndices):
                        counter[ci.Cifti2VertexIndices] += 1
                    if isinstance(map_.voxel_indices_ijk, ci.Cifti2VoxelIndicesIJK):
                        counter[ci.Cifti2VoxelIndicesIJK] += 1
                elif isinstance(map_, ci.Cifti2NamedMap):
                    counter[ci.Cifti2NamedMap] += 1
                    assert_true(isinstance(map_.metadata, ci.Cifti2MetaData))
                    if isinstance(map_.label_table, ci.Cifti2LabelTable):
                        counter[ci.Cifti2LabelTable] += 1
                        for label in map_.label_table:
                            assert_true(isinstance(map_.label_table[label], ci.Cifti2Label))
                            counter[ci.Cifti2Label] += 1
                elif isinstance(map_, ci.Cifti2Parcel):
                    counter[ci.Cifti2Parcel] += 1
                    if isinstance(map_.voxel_indices_ijk, ci.Cifti2VoxelIndicesIJK):
                        counter[ci.Cifti2VoxelIndicesIJK] += 1
                    assert_true(isinstance(map_.vertices, list))
                    for vtcs in map_.vertices:
                        assert_true(isinstance(vtcs, ci.Cifti2Vertices))
                        counter[ci.Cifti2Vertices] += 1
                elif isinstance(map_, ci.Cifti2Surface):
                    counter[ci.Cifti2Surface] += 1
                elif isinstance(map_, ci.Cifti2Volume):
                    counter[ci.Cifti2Volume] += 1
                    if isinstance(map_.transformation_matrix_voxel_indices_ijk_to_xyz,
                                  ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ):
                        counter[ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ] += 1

            assert_equal(list(mim.named_maps),
                         [m_ for m_ in mim if isinstance(m_, ci.Cifti2NamedMap)])
            assert_equal(list(mim.surfaces),
                         [m_ for m_ in mim if isinstance(m_, ci.Cifti2Surface)])
            assert_equal(list(mim.parcels),
                         [m_ for m_ in mim if isinstance(m_, ci.Cifti2Parcel)])
            assert_equal(list(mim.brain_models),
                         [m_ for m_ in mim if isinstance(m_, ci.Cifti2BrainModel)])
            assert_equal([mim.volume] if mim.volume else [],
                         [m_ for m_ in mim if isinstance(m_, ci.Cifti2Volume)])

    for klass, count in counter.items():
        assert_true(count > 0, "No exercise of " + klass.__name__)
def test_read_internal():
    img2 = ci.load(DATA_FILE6)
    assert isinstance(img2.header, ci.Cifti2Header)
    assert img2.shape == (1, 91282)
# Import dependencies.
import numpy as np
import nibabel.cifti2 as ci
import math

from mgcpy.independence_tests.dcorrx import DCorrX
from mgcpy.independence_tests.mgcx import MGCX

# Load image - individual 100307.
img = ci.load("rfMRI_REST1_LR_Atlas_hp2000_clean_filt_sm6.HCPMMP.ptseries.nii")
fmri_data = np.array(img.get_fdata())

# Parameters and constants.
verbose = True  # Print output to track progress.
M = 0           # Number of lags in the past to inspect.
timesteps = range(300)

# Initialize
n = len(timesteps)      # Number of samples.
p = fmri_data.shape[1]  # Number of parcels.
sample_indices = np.array(timesteps)  # Row indices into fmri_data (assumed: the first n timepoints).
dcorrx = DCorrX(max_lag=M)
mgcx = MGCX(max_lag=M)


# i and j represent the parcels of which to test independence.
def compute_dcorrx(i, j):
    X = fmri_data[sample_indices, i].reshape(n, 1)
    Y = fmri_data[sample_indices, j].reshape(n, 1)
    dcorrx_statistic, metadata = dcorrx.test_statistic(X, Y)
    dcorrx_optimal_lag = metadata['optimal_lag']
    # Assumed completion: hand the statistic and optimal lag back to the caller.
    return dcorrx_statistic, dcorrx_optimal_lag
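# Hypothetical driver loop (not part of the original snippet): fill a p x p matrix of
# DCorrX statistics for every parcel pair using compute_dcorrx. This is O(p**2)
# test-statistic calls, so restrict the range for a quick check.
dcorrx_stats = np.zeros((p, p))
optimal_lags = np.zeros((p, p), dtype=int)
for i in range(p):
    for j in range(i + 1, p):
        stat, lag = compute_dcorrx(i, j)
        dcorrx_stats[i, j] = dcorrx_stats[j, i] = stat
        optimal_lags[i, j] = optimal_lags[j, i] = lag
        if verbose:
            print("parcels ({}, {}): dcorrx={:.3f}, optimal lag={}".format(i, j, stat, lag))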
2- Nibabel cifti2 reader has some issues (v2.2.0) and does not read NaN values.
However, the information which can be read is still useful and can be converted
to Brainvoyager SMPs by borrowing information from dlabel.nii files in Matlab.

3- Upon running this script, there will be the following warning:
'pixdim[1,2,3] should be non-zero; setting 0 dims to 1'. This can be ignored.

"""

from scipy.io import savemat
from nibabel.cifti2 import load

fname_myel = '/path/to/Q1-Q6_RelatedParcellation210.MyelinMap_BC_MSMAll_2_d41_WRN_DeDrift.32k_fs_LR.dscalar.nii'
fname_curv = '/path/to/Q1-Q6_RelatedParcellation210.curvature_MSMAll_2_d41_WRN_DeDrift.32k_fs_LR.dscalar.nii'
fname_thic = '/path/to/Q1-Q6_RelatedParcellation210.corrThickness_MSMAll_2_d41_WRN_DeDrift.32k_fs_LR.dscalar.nii'

# Enter the name for the output mat file
fname_out = '/path/to/scalars.mat'

# load cifti images (not reading correctly but good enough to extract maps)
myel = load(fname_myel)
curv = load(fname_curv)
thic = load(fname_thic)

# save the maps to be used with neuroelf
mat = dict()
mat['myelin'] = myel.get_data()
mat['curvature'] = curv.get_data()
mat['cortical_thickness'] = thic.get_data()
savemat(fname_out, mat)

print('Finished.')
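# Given note 2 about NaN handling in the reader, an optional defensive variant
# (a sketch, not part of the original script) replaces non-finite values before
# writing the .mat file; adjust the fill value to whatever the Matlab workflow expects.
import numpy as np

for key in mat:
    mat[key] = np.nan_to_num(np.asarray(mat[key]), nan=0.0, posinf=0.0, neginf=0.0)
savemat(fname_out, mat)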