def write_to_gifti(result, giftis, script_name, zero_mask, cifti=True):
    """Write a (rows x masked-vertices) result array out as L/R GIFTI files.

    Parameters
    ----------
    result : ndarray, shape (n_rows, n_kept_vertices)
        Values for the vertices selected by ``zero_mask``.
    giftis : sequence of two loaded GIFTI images (left, right)
        Templates whose first darray's intent/datatype/meta are copied onto
        every output array, and whose image meta is reused.
    script_name : str
        Basename for the output files.
    zero_mask : index array
        Columns of the padded (L+R) array that ``result`` fills; all other
        vertices are written as zero.
    cifti : bool
        When True, additionally merge the two GIFTIs into a CIFTI dtseries
        via the ``./utils/giftis_to_cifti.sh`` helper script.

    NOTE(review): assumes both hemispheres have the same vertex count (the
    padded width is ``L_shape * 2``) -- confirm for asymmetric meshes.
    """
    n_vertices_l = len(giftis[0].agg_data()[0])

    # Scatter the masked result back onto the full (L+R) vertex grid;
    # unmasked vertices stay zero.
    padded = np.zeros([result.shape[0], n_vertices_l * 2])
    padded[:, zero_mask] = result

    left_img = _build_hemi_gifti(padded[:, :n_vertices_l],
                                 giftis[0].darrays[0], giftis[0].meta)
    right_img = _build_hemi_gifti(padded[:, n_vertices_l:],
                                  giftis[1].darrays[0], giftis[1].meta)

    nb.save(left_img, f'{script_name}.L.func.gii')
    nb.save(right_img, f'{script_name}.R.func.gii')
    if cifti:
        # NOTE(review): script_name is interpolated into a shell command --
        # ensure it is trusted and shell-safe (no spaces/metacharacters).
        os.system(f'./utils/giftis_to_cifti.sh {script_name}.L.func.gii '
                  f'{script_name}.R.func.gii {script_name}.dtseries.nii')


def _build_hemi_gifti(rows, example_array, meta):
    """Build a GiftiImage with one data array per row of *rows*, copying the
    template *example_array*'s intent/datatype/meta onto each."""
    image = GiftiImage(meta=meta)
    for row in rows:
        image.add_gifti_data_array(
            GiftiDataArray(row,
                           intent=example_array.intent,
                           datatype=example_array.datatype,
                           meta=example_array.meta))
    return image
def _write_gifti(pd, opth):
    """Save surface *pd* (points + triangle cells) as a GIFTI file at *opth*.

    Raises ValueError for any non-triangle topology.
    TODO: point data is currently not written out.
    """
    from nibabel.gifti.gifti import GiftiDataArray

    if not pd.has_only_triangle:
        raise ValueError('GIFTI writer only accepts triangles.')

    darrays = [
        GiftiDataArray(data=pd.Points, intent=INTENT_POINTS),
        GiftiDataArray(data=pd.GetCells2D(), intent=INTENT_CELLS),
    ]
    nb.save(nb.gifti.GiftiImage(darrays=darrays), opth)
def save_gifti_texture(self, filename):
    """Save the texture of the surface in a GIFTI (BrainVisa) format.

    Builds a binary label texture (1 on the label's vertices, 0 elsewhere)
    and writes it to *filename*.
    """
    gifti_image = GiftiImage()
    # NOTE(review): the original referenced a bare ``surface_master`` name,
    # which is undefined in this scope; presumably it is an attribute of
    # ``self`` -- confirm against the class definition.
    surface_master = getattr(self, 'surface_master', None)
    if surface_master is None:
        # Bug fix: size the buffer as max vertex id + 1; the original used
        # np.max(...) alone, so assigning at the highest 0-based vertex id
        # below would raise IndexError.
        vertices = np.zeros(np.max(self.label.vertices) + 1)
    else:
        vertices = np.zeros(self.pos_length)
    vertices[list(self.label.vertices)] = 1
    # Create the GIFTI data array.
    darray = GiftiDataArray.from_array(vertices,
                                       intent='NIFTI_INTENT_LABEL',
                                       endian='LittleEndian')
    # NOTE(review): this overrides the intent set just above with
    # NIFTI_INTENT_NONE (0) -- confirm which intent downstream readers expect.
    darray.intent = 0
    # Bug fix: the original called ``giftiImage.add_gifti_data_array`` on an
    # undefined name (NameError); the image built above is ``gifti_image``.
    gifti_image.add_gifti_data_array(darray)
    gifti.giftiio.write(gifti_image, filename)
def remove_gii(centers):
    """Regress covariate effects out of each center's cortical-thickness
    GIFTIs and save the residualized files next to the originals.

    For each subject's thickness file, subtracts ``xi[:4] . reg.coef_[:4]``
    (the fitted covariate contribution) at each valid vertex and writes the
    result to a ``resampled_32k.removed`` twin of the input path.
    """
    for center in centers:
        regs = gii_regs(center)
        pathes, *_ = center.get_cortical_thickness_pathes()
        x = create_x(center)
        for xi, path in zip(x, pathes):
            ct_gii = nib.load(path)
            newpath = path.replace('resampled_32k', 'resampled_32k.removed')
            ct_darray = ct_gii.get_arrays_from_intent(0)[0]
            data = ct_darray.data
            shape = data.shape
            flat = np.asarray(data).flatten()
            # Bug fix: the original computed ``index = data != np.nan``
            # AFTER nan_to_num -- an elementwise comparison with NaN is
            # always True, and the resulting booleans were then used as
            # array indices.  Take the non-NaN vertex indices *before*
            # zeroing the NaNs instead.
            valid_idx = np.flatnonzero(~np.isnan(flat))
            flat = np.nan_to_num(flat)
            new_data = np.zeros_like(flat)
            # One fitted regression per valid vertex; remove the covariate
            # contribution (first four coefficients) at each.
            # NOTE(review): assumes ``regs`` is ordered to match the valid
            # vertices -- confirm against gii_regs().
            for reg, i in zip(regs, valid_idx):
                new_data[i] = flat[i] - np.dot(xi[:4], reg.coef_[:4])
            # .reshape instead of the deprecated ``newshape=`` keyword.
            new_data = new_data.reshape(shape)
            gdarray = GiftiDataArray.from_array(new_data, intent=0)
            ct_gii.remove_gifti_data_array_by_intent(0)
            ct_gii.add_gifti_data_array(gdarray)
            nib.save(ct_gii, newpath)
def meta_ct(label_eg, label_cg, p_thres=0.001, topn=0.3,
            save_gii=True, save_nii=False, mask=None,
            csv_prefix='roi_ct_removed',
            csv_dir_prefix='./data/meta_csv',
            out_dir_prefix='./results/meta'):
    """Run per-ROI random-effects meta-analyses of cortical thickness and
    optionally render the effect sizes onto surface (GIFTI) and volume
    (NIfTI) templates.

    Returns
    -------
    dict
        Fitted models keyed by ROI name (csv basename without extension).
    """
    models = {}
    surfix = '{}_{}/{}'.format(label_eg, label_cg, csv_prefix)
    out_dir = os.path.join(out_dir_prefix, surfix)
    # Bug fix: ``surfix`` is a nested path, so os.mkdir would fail whenever
    # the parent directory does not exist; create intermediates as needed.
    os.makedirs(out_dir, exist_ok=True)
    csv_dir = os.path.join(csv_dir_prefix, surfix)
    for f in os.listdir(csv_dir):
        csv_path = os.path.join(csv_dir, f)
        model = csv_meta_analysis(csv_path, model_type='random')
        models[f[:-4]] = model  # key: filename without '.csv'

    if save_gii:
        for annot, surf, lr in zip(annots, surfs, l_r):
            a = surface.load_surf_data(annot_dir.format(annot))
            a = a.astype(np.float32)
            tmp_gii = nib.load(surf_dir.format(surf))
            cor_model, _ = bon_cor(models, thres=p_thres)
            ll = np.unique(a).tolist()
            _, sorted_models = sort_models(cor_model, descend=False)
            # Effect-size cutoff at the top ``topn`` fraction of models.
            # NOTE(review): topn == 1.0 would index one past the end --
            # confirm callers never pass 1.0.
            top_es = sorted_models[int(len(sorted_models) * topn)].total_effect_size
            for k, v in cor_model.items():
                _id = np.float32(k)
                if v.total_effect_size <= top_es:
                    a[a == _id] = v.total_effect_size
                    if _id in ll:
                        ll.remove(_id)
            # Zero out every ROI without a surviving model.
            for i in ll:
                a[a == i] = 0
            gdarray = GiftiDataArray.from_array(a, intent=0)
            tmp_gii.remove_gifti_data_array_by_intent(0)
            tmp_gii.add_gifti_data_array(gdarray)
            path = os.path.join(
                out_dir,
                'es_{}_bon{}_top{}.gii'.format(lr, str(p_thres)[2:],
                                               str(topn)[1:]))
            nib.save(tmp_gii, path)

    if save_nii:
        nii_array = mask.data.astype(np.float32)
        # Bug fix: the original aliased ``p_array = nii_array`` (same ndarray
        # object), so the effect-size writes destroyed the ids the p-value
        # writes needed to find; take an independent copy.
        p_array = nii_array.copy()
        ll = mask.labels.tolist()
        for k, v in models.items():
            _id = int(k)
            nii_array[nii_array == _id] = v.total_effect_size
            p_array[p_array == _id] = v.p
            # Consistency with the GIFTI branch above: guard against ids
            # missing from the mask label list.
            if _id in ll:
                ll.remove(_id)
        for i in ll:
            nii_array[nii_array == i] = 0
        path = os.path.join(out_dir, 'es.nii')
        p_path = os.path.join(out_dir, 'p.nii')
        utils.gen_nii(nii_array, mask.nii, path)
        utils.gen_nii(p_array, mask.nii, p_path)
    return models
def meta_gii(centers, label_eg, label_cg, out_dir='./results/meta/{}_{}'):
    """Vertexwise meta-analysis of surface data across centers.

    Aggregates per-center group mean/std/count arrays for the experimental
    (*label_eg*) and control (*label_cg*) groups, runs
    ``voxelwise_meta_analysis``, and writes each statistic map to left/right
    hemisphere GIFTI files (32492 vertices per hemisphere).

    Centers missing subjects in either group are skipped and the included
    counts are logged to ``centers.txt``.
    """
    out_dir = out_dir.format(label_eg, label_cg)
    out_dir = os.path.join(out_dir, 'surf')
    # Bug fix: the original chained two os.mkdir calls, which fail whenever
    # a parent directory is missing; create all intermediates instead.
    os.makedirs(out_dir, exist_ok=True)
    with open(os.path.join(out_dir, 'centers.txt'), "w") as text_file:
        center_mean_dict = {}
        center_std_dict = {}
        center_count_dict = {}
        for center in centers:
            n1 = len(center.get_by_label(label_eg))
            n2 = len(center.get_by_label(label_cg))
            # Skip centers lacking subjects in either group.
            if n1 == 0 or n2 == 0:
                continue
            print('{}: e:{}, c:{}'.format(center.name, n1, n2), file=text_file)
            group_mean_dict = {}
            group_std_dict = {}
            group_count_dict = {}
            for label in [label_eg, label_cg]:
                m, s, n = center.load_msn_array(label, _dir='surf')
                group_mean_dict[label] = m
                group_std_dict[label] = s
                group_count_dict[label] = n
            center_mean_dict[center.name] = group_mean_dict
            center_std_dict[center.name] = group_std_dict
            center_count_dict[center.name] = group_count_dict
        results = voxelwise_meta_analysis(label_eg, label_cg,
                                          center_mean_dict=center_mean_dict,
                                          center_std_dict=center_std_dict,
                                          center_count_dict=center_count_dict,
                                          dtype=np.float32)

    n_vertex = 32492  # vertices per hemisphere (fs_LR 32k mesh)
    es = results[0]
    p = results[-1]
    for half, surf_name, tag in (
            (slice(None, n_vertex), 'lh.central.freesurfer.gii', 'l'),
            (slice(n_vertex, None), 'rh.central.freesurfer.gii', 'r')):
        es_h = np.array(es[half])
        p_h = p[half]
        # Bug fix: the original *filtered* the effect sizes
        # (``es_l = es_l[p_l < 0.001]``), which shrinks the array so it no
        # longer matches the surface's vertex count; zero out the
        # non-significant vertices instead.  NOTE(review): confirm the
        # intended thresholding -- the filename suggests a p < 0.001 mask.
        es_h[p_h >= 0.001] = 0
        path = os.path.join(out_dir, 'es_{}_bon001.gii'.format(tag))
        _save_surf_result(es_h, temp_dir.format(surf_name), path)

    result_names = ['es', 'var', 'se', 'll', 'ul', 'q', 'z', 'p']
    for result, name in zip(results, result_names):
        halves = [result[:n_vertex], result[n_vertex:]]
        for _result, surf, lr in zip(halves, surfs, l_r):
            path = os.path.join(out_dir, '{}_{}.gii'.format(name, lr))
            _save_surf_result(_result, temp_dir.format(surf), path)


def _save_surf_result(values, template_path, out_path):
    """Write *values* onto the GIFTI template at *template_path*, replacing
    its intent-0 data array, and save the image to *out_path*."""
    gii = nib.load(template_path)
    gdarray = GiftiDataArray.from_array(values, intent=0)
    gii.remove_gifti_data_array_by_intent(0)
    gii.add_gifti_data_array(gdarray)
    nib.save(gii, out_path)
# Notebook cell: Bonferroni-correct previously computed es/p surface maps
# for every test in ``tests`` and write the thresholded maps onto the
# fsaverage 32k inflated-surface templates.
temp_dir = r'./data/mask/BN_Atlas_freesurfer/fsaverage/fsaverage_LR32k/{}'
surfs = [
    'fsaverage.L.inflated.32k_fs_LR.surf.gii',
    'fsaverage.R.inflated.32k_fs_LR.surf.gii'
]
for test in tests:
    voxel_path = os.path.join(path, test, 'surf')
    for lr, surf in zip(l_r, surfs):
        es_path = os.path.join(voxel_path, 'es_{}.gii'.format(lr))
        p_path = os.path.join(voxel_path, 'p_{}.gii'.format(lr))
        # The last data array holds the statistic map.
        es_array = load_surf_data(es_path)[-1]
        p_array = load_surf_data(p_path)[-1]
        # Correction denominator spans both hemispheres.
        voxel_count = np.size(p_array) * 2
        for p in ps:
            corrected_array = voxelwise_correction(es_array, p_array,
                                                   voxel_count, thres=p)
            new_f = os.path.join(voxel_path,
                                 'es_bon_{}_{}.gii'.format(lr, str(p)[2:]))
            ct_gii = nib.load(temp_dir.format(surf))
            gdarray = GiftiDataArray.from_array(corrected_array, intent=0)
            # Consistency fix: drop any existing intent-0 array before
            # appending, matching the other GIFTI writers in this file.
            ct_gii.remove_gifti_data_array_by_intent(0)
            ct_gii.add_gifti_data_array(gdarray)
            nib.save(ct_gii, new_f)
# %%