def main():
    arguments = docopt(__doc__)
    input_dir = arguments['<input_dir>']
    rest_files = arguments['<rest_file>']
    user_output_dir = arguments['--output_dir']
    debug = arguments['--debug']

    if debug:
        logger.setLevel(logging.DEBUG)

    ciftify.utils.log_arguments(arguments)

    # confirm the external dependencies (connectome workbench and FSL) are available
    verify_wb_available()
    verify_FSL_available()

    # skip everything if the output directory already contains results
    if user_output_dir and os.listdir(user_output_dir):
        logger.debug(
            "Outputs found at {}. No work to do.".format(user_output_dir))
        return

    with TempDir() as temp:
        # build the tissue masks once, then reuse them for every rest file
        brainmask = get_brainmask(input_dir)
        wm_mask, csf_mask = generate_masks(input_dir, temp)

        for image in rest_files:
            if not os.path.exists(image):
                logger.error(
                    "Rest file {} does not exist. Skipping".format(image))
                continue

            # resample each mask into the space of the current rest image
            resampled_wm = resample_mask(image, wm_mask, temp)
            resampled_csf = resample_mask(image, csf_mask, temp)
            resampled_brainmask = resample_mask(image, brainmask, temp)

            image_name = get_image_name(image)
            output_path = get_output_path(user_output_dir, image)
            wm_csv = os.path.join(output_path, image_name + '_WM.csv')
            csf_csv = os.path.join(output_path, image_name + '_CSF.csv')
            global_signal_csv = os.path.join(output_path, image_name + '_GS.csv')

            # extract the mean white matter, CSF and global signal timeseries
            ciftify_meants(image, resampled_wm, wm_csv, mask=resampled_brainmask)
            ciftify_meants(image, resampled_csf, csf_csv, mask=resampled_brainmask)
            ciftify_meants(image, resampled_brainmask, global_signal_csv)
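
# Hedged sketch of the script entry point, which is not shown in the excerpt
# above: docopt(__doc__) in main() parses a module-level usage string whose
# argument names (<input_dir>, <rest_file>, --output_dir, --debug) match the
# keys accessed at the top of main().
if __name__ == '__main__':
    main()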
def load_hemisphere_data(filename, wb_structure, suppress_echo=False):
    '''loads data from one hemisphere of a dscalar.nii file'''
    with TempDir() as little_tempdir:
        ## separate the requested hemisphere out of the cifti file
        data_gii = os.path.join(little_tempdir, 'data.func.gii')
        run(['wb_command', '-cifti-separate', filename, 'COLUMN',
             '-metric', wb_structure, data_gii],
            suppress_echo=suppress_echo)
        # load the metric data as a numpy array
        data = load_gii_data(data_gii)
    return data
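
# Hedged usage sketch for load_hemisphere_data(): 'sub-01.dscalar.nii' is a
# hypothetical input file; 'CORTEX_LEFT' is one of the structure names that
# wb_command -cifti-separate accepts (see load_surfaces below).
left_data = load_hemisphere_data('sub-01.dscalar.nii', 'CORTEX_LEFT')
print(left_data.shape)   # vertices x maps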
def __split(self, output_loc, scene_file, logging, width, height): with TempDir() as tmp_dir: tmp_img = os.path.join(tmp_dir, "scene{}.png".format(self.index)) self.__show_scene(tmp_img, scene_file, logging, width, height) with Image.open(tmp_img) as img: half_the_height = height // 2 img_top = img.crop((0, 0, width, half_the_height)) img_btm = img.crop((0, half_the_height, width, height)) im2 = Image.new('RGBA', (int(width * 2), half_the_height)) im2.paste(img_top, (0, 0)) im2.paste(img_btm, (width, 0)) im2.save(output_loc) return output_loc
def make_image(self, output_loc, scene_file, logging='WARNING', width=600,
               height=400):
    montage_cmd = ['montage', '-mode', 'concatenate', '-tile', self.layout]
    with TempDir() as tmp_dir:
        for scene in self.scenes:
            tmp_path = os.path.join(tmp_dir, "{}.png".format(scene.name))
            scene.make_image(tmp_path, scene_file, logging, width, height)
            montage_cmd.append(tmp_path)
        montage_cmd.append(output_loc)
        run(montage_cmd)
    self.path = output_loc
def __split(self, output_loc, scene_file, logging, width, height): with TempDir() as tmp_dir: tmp_img = os.path.join(tmp_dir, "scene{}.png".format(self.index)) self.__show_scene(tmp_img, scene_file, logging, width, height) tmp_top = os.path.join(tmp_dir, 'top.png') tmp_bottom = os.path.join(tmp_dir, 'bottom.png') run(['convert', tmp_img, '-crop', '100x50%+0+0', tmp_top]) run(['convert', tmp_img, '-crop', '100x50%+0+200', tmp_bottom]) run([ 'montage', '-mode', 'concatenate', '-tile', '2x1', tmp_top, tmp_bottom, output_loc ]) return output_loc
def load_hemisphere_labels(filename, wb_structure, map_number=1):
    '''separates dlabel file into left and right and loads label data'''
    with TempDir() as little_tempdir:
        ## separate the cifti file into left and right surfaces
        labels_gii = os.path.join(little_tempdir, 'data.label.gii')
        run(['wb_command', '-cifti-separate', filename, 'COLUMN',
             '-label', wb_structure, labels_gii])

        # loads label table as dict and data as numpy array
        gifti_img = nibabel.gifti.giftiio.read(labels_gii)
        atlas_data = gifti_img.getArraysFromIntent(
            'NIFTI_INTENT_LABEL')[map_number - 1].data
        atlas_dict = gifti_img.get_labeltable().get_labels_as_dict()

    return atlas_data, atlas_dict
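
# Hedged usage sketch for load_hemisphere_labels(): 'atlas.dlabel.nii' is a
# hypothetical atlas file. The returned array holds one integer label key per
# vertex, and the dict maps label keys to their names.
label_data, label_names = load_hemisphere_labels('atlas.dlabel.nii',
                                                 'CORTEX_RIGHT')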
def load_surfaces(filename, suppress_echo=False):
    '''
    separate a cifti file into surfaces, then load the surface data
    '''
    ## separate the cifti file into left and right surfaces
    with TempDir() as tempdir:
        L_data_surf = os.path.join(tempdir, 'Ldata.func.gii')
        R_data_surf = os.path.join(tempdir, 'Rdata.func.gii')
        run(['wb_command', '-cifti-separate', filename, 'COLUMN',
             '-metric', 'CORTEX_LEFT', L_data_surf,
             '-metric', 'CORTEX_RIGHT', R_data_surf],
            suppress_echo=suppress_echo)

        ## load both surfaces
        Ldata = load_gii_data(L_data_surf)
        Rdata = load_gii_data(R_data_surf)

    return Ldata, Rdata
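
# Hedged usage sketch for load_surfaces(): 'sub-01.dtseries.nii' is a
# hypothetical dense timeseries. Stacking the two hemispheres gives one
# (vertices x timepoints) array, mirroring what load_cifti() does below
# (assuming numpy is imported as np, as load_cifti() requires).
L_data, R_data = load_surfaces('sub-01.dtseries.nii')
both_hemis = np.vstack((L_data, R_data))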
def get_surf_distances(surf, orig_vertex, radius_search=100, dryrun=False,
                       suppress_echo=False):
    '''
    uses the wb_command -surface-geodesic-distance command to measure the
    geodesic distance from one vertex to the rest of the surface, stopping
    at the search radius
    '''
    with TempDir() as tmpdir:
        surf_distance = os.path.join(tmpdir, "distancecalc.shape.gii")
        run(['wb_command', '-surface-geodesic-distance', surf,
             str(orig_vertex), surf_distance, '-limit', str(radius_search)],
            dryrun=dryrun, suppress_echo=suppress_echo)
        distances = load_gii_data(surf_distance)
    return distances
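
# Hedged usage sketch for get_surf_distances(): 'L.midthickness.surf.gii' and
# vertex 6789 are hypothetical. Vertices beyond the -limit radius are given a
# placeholder value by wb_command (negative, in the versions I have seen), so
# the > 0 check drops both them and the seed vertex itself
# (assuming numpy is imported as np, as load_cifti() requires).
dists = get_surf_distances('L.midthickness.surf.gii', 6789, radius_search=10)
nearby_vertices = np.where((dists > 0) & (dists <= 10))[0]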
def load_cifti(filename):
    """
    Usage: cifti_data = load_cifti(filename)

    Loads a cifti file (via wb_command -cifti-separate) and returns a 2D
    matrix of (vertices + voxels) x timepoints.
    """
    logger = logging.getLogger(__name__)

    # check that the file can be read at all
    try:
        cifti = nib.load(filename)
    except Exception:
        logger.error("Cannot read {}".format(filename))
        sys.exit(1)

    ## separate the cifti file into left and right surfaces and the volume
    with TempDir() as tempdir:
        L_data_surf = os.path.join(tempdir, 'Ldata.func.gii')
        R_data_surf = os.path.join(tempdir, 'Rdata.func.gii')
        vol_data_nii = os.path.join(tempdir, 'vol.nii.gz')
        run(['wb_command', '-cifti-separate', filename, 'COLUMN',
             '-metric', 'CORTEX_LEFT', L_data_surf,
             '-metric', 'CORTEX_RIGHT', R_data_surf,
             '-volume-all', vol_data_nii])

        ## load both surfaces and the subcortical volume, then stack them
        Ldata = load_gii_data(L_data_surf)
        Rdata = load_gii_data(R_data_surf)
        voldata, _, _, _ = load_nifti(vol_data_nii)

    cifti_data = np.vstack((Ldata, Rdata, voldata))

    return cifti_data
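
# Hedged usage sketch for load_cifti(): 'sub-01.dtseries.nii' is a hypothetical
# dense timeseries. Rows follow the order stacked above: left-cortex vertices,
# then right-cortex vertices, then subcortical volume voxels; columns are
# timepoints.
data = load_cifti('sub-01.dtseries.nii')
print(data.shape)   # (left vertices + right vertices + volume voxels, timepoints)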