# NOTE(review): this fragment starts mid-expression -- the leading "]" closes
# a list (presumably the per-subject first-level results) built above this
# view; do not remove it without seeing the preceding lines.
]
# Drop subjects whose first-level GLM failed (falsy entries).
subjects = [subject for subject in subjects if subject]

# level 2: group (second-level) statistics
stats_start_time = pretty_time()
mask_images = [subject_data.mask for subject_data in subjects]
# Group mask = voxelwise intersection of all subject masks, cast to int8 and
# stamped with the first subject's affine (assumes every mask lives on the
# same grid -- TODO confirm).  get_affine() is the pre-2.1 nibabel API.
group_mask = nibabel.Nifti1Image(
    intersect_masks(mask_images).astype(np.int8),
    nibabel.load(mask_images[0]).get_affine())
nibabel.save(group_mask, os.path.join(task_output_dir, "mask.nii.gz"))
print "... done.\r\n"
print "Group GLM"
# All subjects are assumed to share the first subject's contrast definitions.
contrasts = subjects[0].contrasts
subjects_effects_maps = [
    subject_data.effects_maps for subject_data in subjects
]
# One-sample t-test across subjects' effects maps, written to task_output_dir.
group_one_sample_t_test(
    mask_images, subjects_effects_maps, contrasts, task_output_dir,
    threshold=threshold, cluster_th=cluster_th,
    start_time=stats_start_time,
    subjects=[subject_data.subject_id for subject_data in subjects],
    title='Group GLM for HCP fMRI %s protocol (%i subjects)' % (protocol,
                                                                len(subjects)),
    slicer=slicer)
# paradigm=paradigm, frametimes=frametimes, # drift_model=drift_model, hrf_model=hrf_model) # ProgressReport().finish_dir(subject_output_dir) return dict(subject_id=subject_id, mask=mask_path, effects_maps=effects_maps, z_maps=z_maps, contrasts=contrasts) # first level GLM mem = Memory(os.path.join(output_dir, "cache_dir")) n_jobs = min(n_jobs, len(subject_ids)) first_levels = Parallel(n_jobs=n_jobs)( delayed(mem.cache(do_subject_glm))(subject_id) for subject_id in subject_ids) # run second-level GLM group_zmaps = group_one_sample_t_test( [subject_data["mask"] for subject_data in first_levels], [subject_data["effects_maps"] for subject_data in first_levels], first_levels[0]["contrasts"], output_dir, threshold=2.) plot_prob_atlas([zmap for zmap in group_zmaps.values() if "_minus_" in zmap], threshold=1.2, view_type="filled_contours") plt.savefig("group_zmaps.png") show()
# NOTE(review): fragment begins inside nested subject/condition loops whose
# headers are above this view; the indentation below is reconstructed.
        # A single missing effects map disqualifies the whole subject.
        if not os.path.isfile(eff_map):
            skip = True
            break
        subject_effects_maps[condition] = eff_map
    if skip:
        continue
    effects_maps.append(subject_effects_maps)
    # Lazily remember the first available normalized, skull-stripped T1 as
    # the anatomical background image.
    if anat is None:
        tmp = os.path.join(subject_dir, "wT1w_acpc_dc_restore_brain.nii")
        if os.path.isfile(tmp):
            anat = tmp

# Load contrast definitions and keep only those matching the conditions at
# hand.  NOTE(review): the open() handle is never closed -- pre-"with"
# Python 2 style.
contrasts = json.load(open("/tmp/contrasts.json"))
contrasts = dict(
    (k, v) for k, v in contrasts.iteritems() if k in conditions)
# Group one-sample t-test; anatomical data/affine are passed only when a T1
# was found above.  get_affine()/get_data() are the old nibabel API.
group_one_sample_t_test(
    masks, effects_maps, contrasts, "/tmp", slicer="ortho", threshold=3.,
    anat=nibabel.load(anat).get_data() if anat else None,
    anat_affine=nibabel.load(anat).get_affine() if anat else None,
    title="HCP group GLM (MOTOR protocol, %i subjects)" % len(masks))
subjects = [subject for subject in subjects if subject] # level 2 stats_start_time = pretty_time() mask_images = [subject_data.mask for subject_data in subjects] group_mask = nibabel.Nifti1Image( intersect_masks(mask_images).astype(np.int8), nibabel.load(mask_images[0]).get_affine()) nibabel.save(group_mask, os.path.join( task_output_dir, "mask.nii.gz")) print "... done.\r\n" print "Group GLM" contrasts = subjects[0].contrasts subjects_effects_maps = [subject_data.effects_maps for subject_data in subjects] group_one_sample_t_test( mask_images, subjects_effects_maps, contrasts, task_output_dir, threshold=threshold, cluster_th=cluster_th, start_time=stats_start_time, subjects=[subject_data.subject_id for subject_data in subjects], title='Group GLM for HCP fMRI %s protocol (%i subjects)' % ( protocol, len(subjects)), slicer=slicer )
# NOTE(review): fragment begins inside nested subject/condition loops whose
# headers are above this view; the indentation below is reconstructed.
        # A missing effects map disqualifies the whole subject.
        if not os.path.isfile(eff_map):
            skip = True
            break
        subject_effects_maps[condition] = eff_map
    if skip:
        continue
    effects_maps.append(subject_effects_maps)
    # Remember the first available skull-stripped, normalized T1 as the
    # anatomical background image.
    if anat is None:
        tmp = os.path.join(subject_dir, "wT1w_acpc_dc_restore_brain.nii")
        if os.path.isfile(tmp):
            anat = tmp

# Restrict the contrast dictionary to the conditions of this protocol.
# NOTE(review): the open() handle is never closed -- pre-"with" Python 2
# style.
contrasts = json.load(open("/tmp/contrasts.json"))
contrasts = dict((k, v) for k, v in contrasts.iteritems()
                 if k in conditions)
# Second-level one-sample t-test; anatomical data/affine only when a T1 was
# found.  get_affine()/get_data() are the old nibabel API.
group_one_sample_t_test(
    masks,
    effects_maps,
    contrasts,
    "/tmp",
    slicer="ortho",
    threshold=3.0,
    anat=nibabel.load(anat).get_data() if anat else None,
    anat_affine=nibabel.load(anat).get_affine() if anat else None,
    title="HCP group GLM (MOTOR protocol, %i subjects)" % len(masks),
)
# stats_report_filename = os.path.join(subject_output_dir, "reports", # "report_stats.html") # generate_subject_stats_report( # stats_report_filename, contrasts, z_maps, fmri_glm.mask, anat=anat, # threshold=2.3, cluster_th=15, design_matrices=design_matrices, TR=tr, # subject_id="sub001", n_scans=n_scans, hfcut=hfcut, # paradigm=paradigm, frametimes=frametimes, # drift_model=drift_model, hrf_model=hrf_model) # ProgressReport().finish_dir(subject_output_dir) return dict(subject_id=subject_id, mask=mask_path, effects_maps=effects_maps, z_maps=z_maps, contrasts=contrasts) # first level GLM mem = Memory(os.path.join(output_dir, "cache_dir")) n_jobs = min(n_jobs, len(subject_ids)) first_levels = Parallel(n_jobs=n_jobs)(delayed(mem.cache(do_subject_glm))( subject_id) for subject_id in subject_ids) # run second-level GLM group_zmaps = group_one_sample_t_test( [subject_data["mask"] for subject_data in first_levels], [subject_data["effects_maps"] for subject_data in first_levels], first_levels[0]["contrasts"], output_dir, threshold=2.) plot_prob_atlas([zmap for zmap in group_zmaps.values() if "_minus_" in zmap], threshold=1.2, view_type="filled_contours") plt.savefig("group_zmaps.png") show()