Example #1
def create_betas_per_trial_with_pymvpa_roni(study_path, subj, conf, mask_name, flavor, TR):
    dhandle = OpenFMRIDataset(study_path)
    model = 1
    task = 1
    # Do this for the other tasks as well, not only the first one
    mask_fname = _opj(study_path, "sub{:0>3d}".format(subj), "masks", conf.mvpa_tasks[0], "{}.nii.gz".format(mask_name))
    print(mask_fname)
    run_datasets = []
    for run_id in dhandle.get_task_bold_run_ids(task)[subj]:
        if isinstance(run_id, str):
            continue

        # all_events = dhandle.get_bold_run_model(model, subj, run_id)
        all_events = get_bold_run_model(dhandle, 2, subj, run_id)
        run_events = []
        i = 0
        for event in all_events:
            if event["task"] == task:
                event["condition"] = "{}-{}".format(event["condition"], event["id"])
                run_events.append(event)
                i += 1

        # load BOLD data for this run (with masking); add 0-based chunk ID
        run_ds = dhandle.get_bold_run_dataset(subj, task, run_id, flavor=flavor, chunks=run_id - 1, mask=mask_fname)
        # convert event info into a sample attribute and assign as 'targets'
        run_ds.sa.time_coords = run_ds.sa.time_indices * TR
        run_ds.sa["targets"] = events2sample_attr(run_events, run_ds.sa.time_coords, noinfolabel="rest")
        # additional time series preprocessing can go here
        poly_detrend(run_ds, polyord=1, chunks_attr="chunks")
        zscore(run_ds, chunks_attr="chunks", param_est=("targets", ["rest"]), dtype="float32")
        glm_dataset = fit_event_hrf_model(run_ds, run_events, time_attr="time_coords", condition_attr="condition")
        glm_dataset.sa["targets"] = [x[: x.find("-")] for x in glm_dataset.sa.condition]
        glm_dataset.sa["id"] = [x[x.find("-") + 1 :] for x in glm_dataset.sa.condition]
        glm_dataset.sa.condition = glm_dataset.sa["targets"]
        glm_dataset.sa["chunks"] = [run_id - 1] * len(glm_dataset.samples)

        # If a trial was dropped (the subject pressed a button), then the counterpart trial from the
        # other condition should also be dropped
        for pair in conf.conditions_to_compare:
            cond_bool = np.array([c in pair for c in glm_dataset.sa["condition"]])
            sub_dataset = glm_dataset[cond_bool]
            c = Counter(sub_dataset.sa.id)
            for value in c:
                if c[value] < 2:
                    id_bool = np.array([value in cond_id for cond_id in glm_dataset.sa["id"]])
                    glm_dataset = glm_dataset[np.bitwise_not(np.logical_and(id_bool, cond_bool))]

        run_datasets.append(glm_dataset)

    return vstack(run_datasets, 0)
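For context, this snippet assumes PyMVPA 2.x and a few standard-library helpers are already imported in the surrounding module, and that get_bold_run_model is a project-local wrapper not shown here. The imports below are a hedged sketch; the exact module paths may differ between PyMVPA versions.

# Likely imports for the snippet above (a sketch, not part of the original code).
# `_opj` is assumed to be an alias for os.path.join, as is common in PyMVPA code.
import numpy as np
from collections import Counter
from os.path import join as _opj
from mvpa2.datasets.sources.openfmri import OpenFMRIDataset
from mvpa2.datasets.eventrelated import events2sample_attr, fit_event_hrf_model
from mvpa2.mappers.detrend import poly_detrend
from mvpa2.mappers.zscore import zscore
from mvpa2.base.dataset import vstack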
Example #2
def test_openfmri_dataset():
    of = OpenFMRIDataset(os.path.join(pymvpa_dataroot, 'openfmri'))
    sub_ids = of.get_subj_ids()
    assert_equal(sub_ids, [1, 'phantom'])
    assert_equal(of.get_scan_properties(), {'TR': '2.5'})
    tasks = of.get_task_descriptions()
    assert_equal(tasks, {1: 'object viewing'})
    task = list(tasks.keys())[0]
    run_ids = of.get_bold_run_ids(sub_ids[0], task)
    assert_equal(run_ids, list(range(1, 13)))
    task_runs = of.get_task_bold_run_ids(task)
    assert_equal(task_runs, {1: list(range(1, 13))})

    orig_attrs = SampleAttributes(os.path.join(pymvpa_dataroot,
                                               'attributes_literal.txt'))
    for subj, runs in task_runs.items():
        for run in runs:
            # load single run
            ds = of.get_bold_run_dataset(subj, task, run, flavor='1slice',
                                         mask=os.path.join(pymvpa_dataroot,
                                                           'mask.nii.gz'),
                                         add_sa='bold_moest.txt')
            # basic shape
            assert_equal(len(ds), 121)
            assert_equal(ds.nfeatures, 530)
            # functional mapper
            assert_equal(ds.O.shape, (121, 40, 20, 1))
            # additional attributes present
            moest = of.get_bold_run_motion_estimates(subj, task, run)
            for i in range(6):
                moest_attr = 'bold_moest.txt_%i' % (i,)
                assert_true(moest_attr in ds.sa)
                assert_array_equal(moest[:,i], ds.sa[moest_attr].value)

            # check conversion of model into sample attribute
            events = of.get_bold_run_model(subj, task, run)
            targets = events2sample_attr(events,
                                         ds.sa.time_coords,
                                         noinfolabel='rest')
            assert_array_equal(
                orig_attrs['targets'][(run - 1) * 121: run * len(ds)], targets)
            assert_equal(ds.sa['subj'][0], subj)

    # more basic access
    motion = of.get_task_bold_attributes(1, 'bold_moest.txt', np.loadtxt)
    assert_equal(len(motion), 12) # one per run
    # one per subject, per volume, 6 estimates
    assert_equal([m.shape for m in motion], [(1, 121, 6)] * 12)
Example #3
def create_betas_per_trial_with_pymvpa(study_path, subj, conf, mask_name, flavor, TR):
    dhandle = OpenFMRIDataset(study_path)
    model = 1
    task = 1
    # Do this for the other tasks as well, not only the first one
    mask_fname = _opj(study_path, "sub{:0>3d}".format(subj), "masks", conf.mvpa_tasks[0], "{}.nii.gz".format(mask_name))
    print(mask_fname)
    run_datasets = []
    for run_id in dhandle.get_task_bold_run_ids(task)[subj]:
        if isinstance(run_id, str):
            continue
        all_events = dhandle.get_bold_run_model(model, subj, run_id)
        run_events = []
        i = 0
        for event in all_events:
            if event["task"] == task:
                event["condition"] = "{}-{}".format(event["condition"], i)
                run_events.append(event)
                i += 1

        # load BOLD data for this run (with masking); add 0-based chunk ID
        run_ds = dhandle.get_bold_run_dataset(subj, task, run_id, flavor=flavor, chunks=run_id - 1, mask=mask_fname)
        # convert event info into a sample attribute and assign as 'targets'
        run_ds.sa.time_coords = run_ds.sa.time_indices * TR
        print(run_id)

        run_ds.sa["targets"] = events2sample_attr(run_events, run_ds.sa.time_coords, noinfolabel="rest")
        # additional time series preprocessing can go here
        poly_detrend(run_ds, polyord=1, chunks_attr="chunks")
        zscore(run_ds, chunks_attr="chunks", param_est=("targets", ["rest"]), dtype="float32")
        glm_dataset = fit_event_hrf_model(run_ds, run_events, time_attr="time_coords", condition_attr="condition")
        glm_dataset.sa["targets"] = [x[: x.find("-")] for x in glm_dataset.sa.condition]
        glm_dataset.sa.condition = glm_dataset.sa["targets"]
        glm_dataset.sa["chunks"] = [run_id - 1] * len(glm_dataset.samples)
        run_datasets.append(glm_dataset)
    return vstack(run_datasets, 0)
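A minimal, hypothetical call site for either beta-extraction helper might look like the sketch below. The conf object, paths, task names, and parameter values are illustrative stand-ins, not part of the original code.

# Hypothetical usage sketch; every value below is an illustrative placeholder.
class Conf(object):
    mvpa_tasks = ["task001"]                      # task folder the mask lives under
    conditions_to_compare = [("faceA", "faceB")]  # only needed by the *_roni variant

betas = create_betas_per_trial_with_pymvpa(
    study_path="/data/openfmri_study",  # OpenFMRI-layout study directory
    subj=1,                             # maps to sub001 on disk
    conf=Conf(),
    mask_name="ventral_temporal",       # expects sub001/masks/<task>/<mask_name>.nii.gz
    flavor="mc",                        # preprocessed BOLD flavor to load
    TR=2.0,                             # repetition time in seconds
)
print(betas.shape)                      # one beta sample per modeled trial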