예제 #1
0
파일: glm.py 프로젝트: JohnGriffiths/spym
def _first_level(out_dir, data, design_matrices, contrasts,
                 glm_model='ar1', mask='compute', verbose=1):
    if verbose:
        print '%s:' % out_dir

    data = check_niimgs(data)
    design_matrices = check_design_matrices(design_matrices)
    contrasts = check_contrasts(contrasts)
    glm = FMRILinearModel(data, design_matrices, mask=mask)
    glm.fit(do_scaling=True, model=glm_model)

    for i, contrast_id in enumerate(contrasts):
        if verbose:
            print '  %s/%s - %s ' % (i, len(contrasts), contrast_id)

        con_val = []
        for session_con, session_dm in zip(contrasts[contrast_id],
                                           design_matrices):

            con = np.zeros(session_dm.shape[1])
            con[:len(session_con)] = session_con
            con_val.append(con)

        z_map, t_map, c_map, var_map = glm.contrast(
            con_val,
            con_id=contrast_id,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True,)

        for dtype, img in zip(['z', 't', 'c', 'var'],
                              [z_map, t_map, c_map, var_map]):

            map_dir = os.path.join(out_dir, '%s_maps' % dtype)

            if not os.path.exists(map_dir):
                os.makedirs(map_dir)

            path = os.path.join(
                map_dir, '%s.nii.gz' % remove_special(contrast_id))
            nb.save(img, path)

    nb.save(glm.mask, os.path.join(out_dir, 'mask.nii.gz'))
예제 #2
0
 def _filename(self, ext='json'):
     """Return the file name '<number> - <sanitized title>.<ext>'."""
     pieces = [str(self.number), ' - ', remove_special(self.title), '.', ext]
     return ''.join(pieces)
예제 #3
0
def _openfmri_intra(out_dir, doc, metadata=None, verbose=1):
    """
        Parameters
        ----------
        metadata: dict
            - condition_key
              https://openfmri.org/content/metadata-condition-key

        Examples
        --------
        {'condition_key': {'task001 cond001': 'task',
                           'task001 cond002': 'parametric gain'}}
    """
    if 'study_id' in doc:
        study_dir = os.path.join(out_dir, doc['study_id'])
    else:
        study_dir = out_dir

    if verbose > 0:
        print '%s@%s: dumping stats intra' % (doc['subject_id'],
                                              doc['study_id'])

    subject_dir = os.path.join(study_dir, doc['subject_id'])

    model_dir = os.path.join(study_dir, 'models', 'model001')

    if not os.path.exists(model_dir):
        os.makedirs(model_dir)

    # conditions specification
    conditions_spec = []
    for key, val in sorted(metadata['condition_key'].iteritems()):
        for i, name in enumerate(val):
            conditions_spec.append(
                '%s cond%03i %s\n' % (key.split(' ')[0], i + 1, name))

    with open(os.path.join(model_dir, 'condition_key.txt'), 'wb') as f:
                f.write(''.join(sorted(set(conditions_spec))))

    # contrasts specification
    contrasts_spec = []
    for key, val in doc['task_contrasts'].iteritems():
        if 'task_contrasts' in metadata:
            key = doc['task_contrasts'][key]

        for i, session_contrast in enumerate(val):
            task_id = metadata['run_key'][i].split(' ')[0]
            # check not null and 1d
            if (np.abs(session_contrast).sum() > 0
                and len(np.array(session_contrast).shape) == 1):
                con = ' '.join(np.array(session_contrast).astype('|S32'))
                contrasts_spec.append('%s %s %s\n' % (task_id, key, con))

    with open(os.path.join(model_dir, 'task_contrasts.txt'), 'wb') as f:
        f.write(''.join(sorted(set(contrasts_spec))))

    # dump onsets
    model_dir = os.path.join(subject_dir, 'model', 'model001')
    onsets_dir = os.path.join(model_dir, 'onsets')

    for onsets, run_key in zip(doc['onsets'], metadata['run_key']):
        run_dir = os.path.join(onsets_dir, run_key.replace(' ', '_'))
        if not os.path.exists(run_dir):
            os.makedirs(run_dir)
        for condition_id, values in sorted(onsets.iteritems()):
            cond = os.path.join(run_dir, '%s.txt' % condition_id)
            with open(cond, 'wb') as f:
                for timepoint in values:
                    f.write('%s %s %s\n' % timepoint)

    # analyses
    for dtype in ['c_maps', 't_maps']:
        data_dir = os.path.join(model_dir, dtype)
        if not os.path.exists(data_dir):
            os.makedirs(data_dir)
        if isinstance(doc[dtype], dict):
            for contrast_id in doc[dtype].keys():
                fname = remove_special(contrast_id)
                img = nb.load(doc[dtype][contrast_id])
                nb.save(img, os.path.join(data_dir, '%s.nii.gz' % fname))

    # general data for analysis
    img = nb.load(doc['mask'])
    nb.save(img, os.path.join(model_dir, 'mask.nii.gz'))
    json.dump(doc, open(os.path.join(model_dir, 'SPM.json'), 'wb'))
예제 #4
0
 def _filename(self, ext='json'):
     """Build the file name '<number> - <clean title>.<ext>'."""
     prefix = str(self.number) + ' - '
     return prefix + remove_special(self.title) + '.' + ext