Example 1
import numpy as np
import pandas as pd
from numpy.testing import assert_array_equal

# import path assumes the current nilearn layout
from nilearn.glm.second_level import make_second_level_design_matrix


def test_create_second_level_design():
    subjects_label = ['02', '01']  # change order to test right output order
    regressors = [['01', 0.1], ['02', 0.75]]
    regressors = pd.DataFrame(regressors, columns=['subject_label', 'f1'])
    design = make_second_level_design_matrix(subjects_label, regressors)
    expected_design = np.array([[0.75, 1], [0.1, 1]])
    assert_array_equal(design, expected_design)
    assert len(design.columns) == 2
    assert len(design) == 2
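
Outside of the test, a minimal usage sketch of make_second_level_design_matrix could look like the following (the nilearn import path and the 'age' column are illustrative assumptions; the function returns one row per subject, with the confound columns followed by an intercept column):

import pandas as pd
from nilearn.glm.second_level import make_second_level_design_matrix

subjects_label = ['sub-01', 'sub-02', 'sub-03']
# extra regressors: one row per subject, keyed by subject_label
extra_info = pd.DataFrame({'subject_label': subjects_label,
                           'age': [23, 31, 28]})
design = make_second_level_design_matrix(subjects_label, confounds=extra_info)
print(design)  # columns: 'age' and 'intercept', one row per subject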
Example 2
    def fit(self, second_level_input, confounds=None, design_matrix=None):
        """ Fit the second-level GLM

        1. create design matrix
        2. do a masker job: fMRI_data -> Y
        3. fit regression to (Y, X)

        Parameters
        ----------
        second_level_input : list of `FirstLevelModel` objects or pandas
                             DataFrame or list of Niimg-like objects.

            Giving FirstLevelModel objects makes it possible to easily
            compute the second-level contrast of arbitrary first-level
            contrasts, thanks to the first_level_contrast argument of the
            compute_contrast method. Effect size images will be computed
            for each model to contrast at the second level.

            If a pandas DataFrame, it must contain the columns
            subject_label, map_name and effects_map_path. It can contain
            multiple maps that would be selected during contrast estimation
            with the first_level_contrast argument of the compute_contrast
            function. The DataFrame will be sorted based on the
            subject_label column to avoid order inconsistencies when
            extracting the maps, so the rows of the automatically computed
            design matrix, if not provided, will correspond to the sorted
            subject_label column.

            If a list of Niimg-like objects, they are taken literally as Y
            for the model fit and design_matrix must be provided.

        confounds : pandas DataFrame, optional
            Must contain a subject_label column. All other columns are
            treated as confounds and included in the model. If
            design_matrix is provided, then this argument is ignored.
            The resulting second-level design matrix uses the same column
            names for the confounds as in the given DataFrame. At least two
            columns are expected: "subject_label" and at least one confound.

        design_matrix : pandas DataFrame, optional
            Design matrix to fit the GLM. The number of rows
            in the design matrix must agree with the number of maps derived
            from second_level_input.
            Ensure that the order of maps given by a second_level_input
            list of Niimgs matches the order of the rows in the design matrix.

        """
        # check second_level_input
        _check_second_level_input(second_level_input,
                                  design_matrix,
                                  confounds=confounds)

        # check confounds
        _check_confounds(confounds)

        # check design matrix
        _check_design_matrix(design_matrix)

        if isinstance(second_level_input, pd.DataFrame):
            second_level_input = _sort_input_dataframe(second_level_input)
        self.second_level_input_ = second_level_input
        self.confounds_ = confounds
        sample_map, subjects_label = _process_second_level_input(
            second_level_input)

        # Report progress
        t0 = time.time()
        if self.verbose > 0:
            sys.stderr.write("Fitting second level model. "
                             "Take a deep breath\r")

        # Create and set design matrix, if not given
        if design_matrix is None:
            design_matrix = make_second_level_design_matrix(
                subjects_label, confounds)
        self.design_matrix_ = design_matrix

        # Learn the mask. Assume the first level imgs have been masked.
        if not isinstance(self.mask_img, NiftiMasker):
            self.masker_ = NiftiMasker(mask_img=self.mask_img,
                                       target_affine=self.target_affine,
                                       target_shape=self.target_shape,
                                       smoothing_fwhm=self.smoothing_fwhm,
                                       memory=self.memory,
                                       verbose=max(0, self.verbose - 1),
                                       memory_level=self.memory_level)
        else:
            self.masker_ = clone(self.mask_img)
            for param_name in ['smoothing_fwhm', 'memory', 'memory_level']:
                our_param = getattr(self, param_name)
                if our_param is None:
                    continue
                if getattr(self.masker_, param_name) is not None:
                    warn('Parameter %s of the masker overridden' % param_name)
                setattr(self.masker_, param_name, our_param)
        self.masker_.fit(sample_map)

        # Report progress
        if self.verbose > 0:
            sys.stderr.write("\nComputation of second level model done in "
                             "%i seconds\n" % (time.time() - t0))

        return self
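
The docstring above lists three accepted forms of second_level_input. As a minimal sketch of the list-of-Niimgs mode (the file names, smoothing value and 'intercept' column are illustrative; the nilearn import path is assumed):

import pandas as pd
from nilearn.glm.second_level import SecondLevelModel

# one first-level contrast map per subject (hypothetical file names)
contrast_maps = ['sub-01_con.nii.gz', 'sub-02_con.nii.gz', 'sub-03_con.nii.gz']

# design_matrix is mandatory when passing a list of Niimgs; its rows must
# follow the order of contrast_maps
design_matrix = pd.DataFrame({'intercept': [1, 1, 1]})

model = SecondLevelModel(smoothing_fwhm=8.0)
model = model.fit(contrast_maps, design_matrix=design_matrix)
z_map = model.compute_contrast('intercept', output_type='z_score')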