Example #1
import os

import numpy as np
import pandas as pd

# construct_targets, construct_filter, filter_targets, and nod_mat are
# project-specific helpers assumed to be importable from the surrounding package.


def _create_nod(metapath, fidlpath, scode):
    triallen = 5

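    # Load this subject's trial metadata.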
    meta = pd.read_csv(metapath)
    faceshouses = np.array(meta["exp"].tolist())
    trs = np.array(meta["TR"].tolist())
    trial_index = np.array(meta["trialcount"].tolist())

    targets = construct_targets(
            trs=trs,
            faceshouses=faceshouses,
            trial_index=trial_index)

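    # Keep only the face and house trials.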
    keepers = ["face", "house"]
    keep_fhs = construct_filter(targets["faceshouses"], keepers, True)
    targets = filter_targets(keep_fhs, targets)
    
    names = targets["faceshouses"]
    onsets = targets["trs"]
    durations = np.array([triallen, ] * len(targets["trial_index"]))

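    # Write the names, onsets, and durations out as a .mat file in fidlpath.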
    nod_mat(names, onsets, durations, os.path.join(fidlpath, 
            "nod_" + scode + "_stim_facehouse.mat"))
Example #2
    def run(self, basename, smooth=False, filtfile=None,
            n=None, tr=None, n_rt=None, n_trials_per_cond=None,
            durations=None, noise=None, n_features=None, n_univariate=None,
            n_accumulator=None, n_decision=None, n_noise=None,
            n_repeated=None, drift_noise=False, step_noise=False):
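        # Assumes module-level imports of os, numpy (as np), and sklearn's
        # MinMaxScaler, plus the project helpers used below (make_bold,
        # construct_targets, filter_targets, filterX, smoothfn, unique_nan,
        # sort_nanfirst, join_by_underscore, save_tcdf).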
        
        # Initialize write mode: the first subject writes a new file with a header.
        mode = 'w'
        header = True

        for scode in range(n):
            # If we're past the first subject's data, append instead.
            if scode > 0:
                mode = 'a'
                header = False

            # Create the data
            X, y, y_trialcount = make_bold(
                    n_rt, 
                    n_trials_per_cond, 
                    tr, 
                    durations=durations, 
                    noise=noise, 
                    n_features=n_features, 
                    n_univariate=n_univariate, 
                    n_accumulator=n_accumulator, 
                    n_decision=n_decision,
                    n_noise=n_noise,
                    n_repeated=n_repeated,
                    drift_noise=drift_noise,
                    step_noise=step_noise)

            targets = construct_targets(trial_index=y_trialcount, y=y)

            # Drop baseline trials created by make_bold
            baselinemask = np.arange(y.shape[0])[y != 0]
            X = X[baselinemask, :]
            targets = filter_targets(baselinemask, targets)

            # Filter and smooth, if requested.
            if filtfile is not None:
                X, targets = filterX(filtfile, X, targets)
            if smooth:
                X = smoothfn(X, tr=1.5, ub=0.10, lb=0.001)
            
            # Normalize
            norm = MinMaxScaler(feature_range=(0, 1))
            X = norm.fit_transform(X.astype(float))
            
            # Finally, decompose.
            Xcs, csnames, ti_cs = self.spacetime.fit_transform(
                    X, targets["y"], targets["trial_index"], 
                    self.window)
            
            # Name them,
            csnames = unique_nan(y)
            csnames = sort_nanfirst(csnames)

            # and write.
            for Xc, csname, ti in zip(Xcs, csnames, ti_cs):
                save_tcdf(
                        name=join_by_underscore(True, basename, csname), 
                        X=Xc, 
                        cond=csname,
                        dataname=join_by_underscore(False, 
                                os.path.split(basename)[-1], scode),
                        index=ti.astype(int),
                        header=header, 
                        mode=mode,
                        float_format="%.{0}f".format(self.nsig))