def test_fetch_bids_langloc_dataset():
    data_dir = os.path.join(tst.tmpdir, 'bids_langloc_example')
    os.mkdir(data_dir)
    main_folder = os.path.join(data_dir, 'bids_langloc_dataset')
    os.mkdir(main_folder)

    datadir, dl_files = datasets.fetch_bids_langloc_dataset(tst.tmpdir)

    assert_true(isinstance(datadir, _basestring))
    assert_true(isinstance(dl_files, list))
def test_fetch_bids_langloc_dataset(request_mocker, tmp_path):
    data_dir = str(tmp_path / 'bids_langloc_example')
    os.mkdir(data_dir)
    main_folder = os.path.join(data_dir, 'bids_langloc_dataset')
    os.mkdir(main_folder)

    datadir, dl_files = datasets.fetch_bids_langloc_dataset(str(tmp_path))

    assert isinstance(datadir, _basestring)
    assert isinstance(dl_files, list)
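# For context, a minimal usage sketch of the fetcher exercised by the two test
# variants above. It assumes only what the assertions check: that
# fetch_bids_langloc_dataset accepts a data_dir argument and returns a
# (dataset_path, downloaded_files) pair. The scratch directory is illustrative.
import tempfile

from nistats import datasets

with tempfile.TemporaryDirectory() as scratch_dir:
    # Fetch the example BIDS dataset into the scratch directory.
    dataset_path, downloaded_files = datasets.fetch_bids_langloc_dataset(
        scratch_dir)
    print(dataset_path)           # path to the fetched BIDS dataset folder
    print(len(downloaded_files))  # number of files retrieved by the fetcher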
    :local:
    :depth: 1

"""

##############################################################################
# Fetch example BIDS dataset
# --------------------------
# We download a simplified BIDS dataset made available for illustrative
# purposes. It contains only the necessary information to run a statistical
# analysis using Nistats. The raw data subject folders only contain bold.json
# and events.tsv files, while the derivatives folder with preprocessed files
# contains preproc.nii and confounds.tsv files.
from nistats.datasets import fetch_bids_langloc_dataset

data_dir, _ = fetch_bids_langloc_dataset()

##############################################################################
# Here is the location of the dataset on disk.
print(data_dir)

##############################################################################
# Automatically obtain FirstLevelModel objects and fit arguments
# --------------------------------------------------------------
# From the dataset directory we automatically obtain FirstLevelModel objects
# with their subject_id filled in from the BIDS dataset. Moreover, we obtain
# for each model a dictionary with run_imgs, events and confound regressors,
# since in this case a confounds.tsv file is available in the BIDS dataset.
# To get the first level models we only have to specify the dataset directory
# and the task_label as specified in the file names.
from nistats.first_level_model import first_level_models_from_bids
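##############################################################################
# A minimal sketch of the call set up by the import above, assuming the
# task_label and space_label values used for this dataset elsewhere in this
# section ('languagelocalizer', 'MNI152nonlin2009aAsym'). The four returned
# lists are aligned, with one entry per subject found in the BIDS directory.
models, models_run_imgs, models_events, models_confounds = \
    first_level_models_from_bids(data_dir, 'languagelocalizer',
                                 'MNI152nonlin2009aAsym')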
from scipy.stats import norm
import matplotlib.pyplot as plt

from nistats.datasets import fetch_bids_langloc_dataset
from nistats.first_level_model import first_level_models_from_bids
from nistats.second_level_model import SecondLevelModel

##############################################################################
# Fetch example BIDS dataset
# --------------------------
# We download a partial example BIDS dataset. It contains only the necessary
# information to run a statistical analysis using Nistats. The raw data
# subject folders only contain bold.json and events.tsv files, while the
# derivatives folder with preprocessed files contains preproc.nii and
# confounds.tsv files.
data_dir, _ = fetch_bids_langloc_dataset()

##############################################################################
# Automatically obtain FirstLevelModel objects and fit arguments
# --------------------------------------------------------------
# From the dataset directory we automatically obtain FirstLevelModel objects
# with their subject_id filled in from the BIDS dataset. Moreover, we obtain
# for each model a dictionary with run_imgs, events and confound regressors,
# since in this case a confounds.tsv file is available in the BIDS dataset.
# To get the first level models we only have to specify the dataset
# directory, the task_label and the space_label as specified in the file
# names.
task_label = 'languagelocalizer'
space_label = 'MNI152nonlin2009aAsym'
models, models_run_imgs, models_events, models_confounds = \
    first_level_models_from_bids(
        data_dir, task_label, space_label,