def dummy_test(infile, expfile):
    """Run f_test on pickled input attributes and compare against expected output.

    Parameters
    ----------
    infile : str
        Path to a pickle of SLM attributes. Keys containing "1" populate the
        first model, keys containing "2" the second; the first four characters
        of each key are a prefix and are stripped before assignment.
    expfile : str
        Path to a pickle of the expected f_test output attributes.

    Raises
    ------
    AssertionError
        If any expected attribute is not numerically close to the result.
    """
    # load input test data (context manager guarantees the file is closed)
    with open(infile, "br") as ifile:
        idic = pickle.load(ifile)

    slm1 = SLM(Term(1), Term(1))
    slm2 = SLM(Term(1), Term(2))
    for key in idic.keys():
        if "1" in key:
            setattr(slm1, key[4:], idic[key])
        elif "2" in key:
            setattr(slm2, key[4:], idic[key])

    # run f test
    outdic = f_test(slm1, slm2)

    # load expected output data
    with open(expfile, "br") as efile:
        expdic = pickle.load(efile)

    # f_test returns an object, hence getattr rather than dict indexing
    testout = []
    for key in expdic.keys():
        comp = np.allclose(getattr(outdic, key),
                           expdic[key],
                           rtol=1e-05,
                           equal_nan=True)
        testout.append(comp)

    assert all(testout)
# Example #2
def dummy_test(infile, expfile):
    """Run compute_resels on pickled input and compare against expected output.

    Parameters
    ----------
    infile : str
        Path to a pickle of SLM attributes used as input.
    expfile : str
        Path to a pickle with expected "resels", "reselspvert" and "edg".

    Raises
    ------
    AssertionError
        If any non-None output is not numerically close to the expectation.
    """
    # load input test data (context manager guarantees the file is closed)
    with open(infile, "br") as ifile:
        idic = pickle.load(ifile)

    slm = SLM(Term(1), Term(1))
    for key in idic.keys():
        setattr(slm, key, idic[key])

    resels_py, reselspvert_py, edg_py = compute_resels(slm)

    out = {
        "resels": resels_py,
        "reselspvert": reselspvert_py,
        "edg": edg_py,
    }

    # load expected output data
    with open(expfile, "br") as efile:
        expdic = pickle.load(efile)

    testout = []
    for key in out.keys():
        # Some outputs are legitimately None; only compare when both sides exist.
        if out[key] is not None and expdic[key] is not None:
            comp = np.allclose(out[key],
                               expdic[key],
                               rtol=1e-05,
                               equal_nan=True)
            testout.append(comp)

    assert all(testout)
# Example #3
def create_parameter_grid(samples, predictors):
    """Creates a parameter grid for the test function.

    Returns
    -------
    ParameterGrid
        All pairings of parameters to be run through the SLM class.
    """
    # Random draws happen in the same order as the grid is declared:
    # model term, two contrasts, then the mask.
    random_term = Term(
        np.random.rand(samples, predictors), names=["y1", "y2", "y3"]
    )
    return ParameterGrid({
        "Y_idx": [1, 2, 3],
        "model": [Term(1) + random_term],
        "contrast": [np.random.rand(samples), Term(np.random.rand(samples))],
        "surf": [None, read_surface_gz(fetch_surf_fsaverage()["pial_left"])],
        "mask": [None, np.random.rand(10242) > 0.1],
        "correction": [None, ["rft", "fdr"]],
        "two_tailed": [False, True],
    })
def dummy_test(infile, expfile):
    """Run t_test in place on pickled input and compare against expected output.

    Parameters
    ----------
    infile : str
        Path to a pickle of SLM attributes used as input.
    expfile : str
        Path to a pickle of the expected SLM attributes after t_test.

    Raises
    ------
    AssertionError
        If any expected attribute is not numerically close to the result.
    """
    # load input test data (context manager guarantees the file is closed)
    with open(infile, "br") as ifile:
        idic = pickle.load(ifile)

    slm = SLM(Term(1), Term(1))
    for key in idic.keys():
        setattr(slm, key, idic[key])

    # run t_test; it mutates slm in place rather than returning a result
    t_test(slm)

    # load expected output data
    with open(expfile, "br") as efile:
        expdic = pickle.load(efile)

    testout = []
    for key in expdic.keys():
        comp = np.allclose(getattr(slm, key),
                           expdic[key],
                           rtol=1e-05,
                           equal_nan=True)
        testout.append(comp)

    assert all(testout)
def get_linmod_output(Y, M, foutname, tri=None, lat=None):
    """ Runs linmod and returns all relevant output. """
    slm = SLM(M, Term(1))

    # Optional geometry: triangles go on slm.surf, lattices on slm.lat.
    if tri is not None:
        slm.surf = {"tri": tri}
    if lat is not None:
        slm.lat = {"lat": lat}

    slm.linear_model(Y)

    keys = (
        "cluster_threshold",
        "coef",
        "df",
        "drlim",
        "niter",
        "resl",
        "SSE",
        "thetalim",
        "X",
        "tri",
    )

    # Collect only the attributes that linear_model actually populated.
    D = {key: getattr(slm, key) for key in keys if getattr(slm, key) is not None}

    with open(foutname, "wb") as handle:
        pickle.dump(D, handle, protocol=pickle.HIGHEST_PROTOCOL)

    return D
# Example #6
def dummy_test(infile, expfile, simple=True):
    """Run linear_model on pickled (Y, M) data and compare resulting SLM
    attributes against pickled expected output.

    Parameters
    ----------
    infile : str
        Path to a pickle with keys "Y", "M" and optionally "tri" / "lat".
    expfile : str
        Path to a pickle of expected SLM attribute values.
    simple : bool, optional
        Unused here; kept for signature compatibility with callers.

    Raises
    ------
    AssertionError
        If any expected attribute is not numerically close to the result.
    """
    # load input test data (context manager guarantees the file is closed)
    with open(infile, "br") as ifile:
        Din = pickle.load(ifile)

    Y = Din["Y"]
    M = Din["M"]

    # assign slm params
    slm = SLM(M, Term(1))

    if "tri" in Din:
        slm.surf = {"tri": Din["tri"]}
    if "lat" in Din:
        # NOTE(review): a sibling helper in this file stores lattices on
        # slm.lat, not slm.surf — confirm which attribute linear_model expects.
        slm.surf = {"lat": Din["lat"]}

    # here we go --> run the linear model
    slm.linear_model(Y)

    with open(expfile, "br") as ofile:
        Dout = pickle.load(ofile)

    # compare every expected attribute against the fitted model
    testout = []
    for makey_ in Dout.keys():
        comp = np.allclose(
            getattr(slm, makey_), Dout[makey_], rtol=1e-05, equal_nan=True
        )
        testout.append(comp)
    assert all(testout)
# Example #7
def dummy_test(infile, expfile):
    """Run random_field_theory on pickled input and compare (pval, peak, clus,
    clusid) against pickled expected output.

    Parameters
    ----------
    infile : str
        Path to a pickle of SLM attributes ("clusthresh" maps to
        slm.cluster_threshold).
    expfile : str
        Path to a pickle with keys "pval", "peak", "clus" and "clusid".

    Raises
    ------
    AssertionError
        If any compared value is not numerically close to the expectation.
    """
    # load input test data (context manager guarantees the file is closed)
    with open(infile, "br") as ifile:
        idic = pickle.load(ifile)

    slm = SLM(Term(1), Term(1))
    for key in idic.keys():
        if key == "clusthresh":
            # legacy key name maps onto the current attribute name
            slm.cluster_threshold = idic[key]
        else:
            setattr(slm, key, idic[key])
    empirical_output = random_field_theory(slm)

    # load expected output data
    with open(expfile, "br") as efile:
        expdic = pickle.load(efile)
    expected_output = (expdic["pval"], expdic["peak"], expdic["clus"],
                       expdic["clusid"])

    testout = []
    for (empirical, expected) in zip(empirical_output, expected_output):
        if isinstance(expected, dict):
            for key in expected:
                if key == "mask":
                    # "mask" was removed from the current implementation
                    continue
                comp = np.allclose(empirical[key],
                                   expected[key],
                                   rtol=1e-05,
                                   equal_nan=True)
                testout.append(comp)
        else:
            # BUG FIX: was `len(expected) is not 0` — identity comparison with
            # an int literal is implementation-defined (SyntaxWarning in 3.8+).
            if len(expected) != 0:
                comp = np.allclose(empirical,
                                   expected,
                                   rtol=1e-05,
                                   equal_nan=True)
                testout.append(comp)

    assert all(testout)
def dummy_test(infile, expfile):
    """Run fdr on pickled input and compare Q against pickled expected output.

    Parameters
    ----------
    infile : str
        Path to a pickle of SLM attributes used as input.
    expfile : str
        Path to a pickle containing the expected "Q" array.

    Raises
    ------
    AssertionError
        If the computed Q is not numerically close to the expectation.
    """
    # load input test data (context manager guarantees the file is closed)
    with open(infile, "br") as ifile:
        idic = pickle.load(ifile)

    slm = SLM(Term(1), Term(1))
    for key in idic.keys():
        setattr(slm, key, idic[key])

    # run fdr
    Q = fdr(slm)

    # load expected output data
    # Note: expected dicts contain a "mask" key. this has been removed in our
    # current implementation.
    with open(expfile, "br") as efile:
        expdic = pickle.load(efile)

    assert np.allclose(Q, expdic["Q"])
def generate_random_test_data(
    Y_dim,
    M_dim,
    finname,
    seed=0,
    triD=None,
    latD=None,
    M_term=False,
    add_intercept=True,
):
    """ Generate random test datasets. """
    # Y_dim : tuple
    # M_dim : tuple
    # finname : filename ending with *pkl
    np.random.seed(seed=seed)
    Y = np.random.random_sample(Y_dim)
    M = np.random.random_sample(M_dim)
    if add_intercept:
        intercept = np.ones((M_dim[0], 1))
        M = np.concatenate((intercept, M), axis=1)
    if M_term:
        M = Term(M)

    D = {"Y": Y, "M": M}

    tri = None
    lat = None
    if triD is not None:
        tri = np.random.randint(
            triD["tri_min"], triD["tri_max"], size=triD["tri_dim"]
        )
        D["tri"] = tri
    if latD is not None:
        lat = np.random.randint(
            latD["lat_min"], latD["lat_max"], size=latD["lat_dim"]
        )
        D["lat"] = lat

    # Persist the dataset so tests can reload the exact same inputs.
    with open(finname, "wb") as handle:
        pickle.dump(D, handle, protocol=pickle.HIGHEST_PROTOCOL)

    # Return geometry only when it was generated; tri takes precedence.
    if tri is not None:
        return Y, M, tri
    if lat is not None:
        return Y, M, lat
    return Y, M
# Example #10
# Load per-subject cortical thickness maps into one (n, 10242) array.
# NOTE(review): `n`, `files`, `nib`, `age` and `iq` are defined earlier in
# the full file (not shown here) — presumably n subjects and a file table.
thickness = np.zeros((n, 10242))
for i in range(n):
    thickness[i, :] = np.squeeze(nib.load(files[i, 0]).get_fdata())
# Keep only vertices with non-zero thickness in every subject.
mask = np.all(thickness != 0, axis=0)

pial_left = read_surface_gz(fetch_surf_fsaverage()["pial_left"])

###################################################################
# Next, we can create a BrainStat linear model by declaring these variables as
# terms. The term class requires two things: 1) an array or scalar, and 2) a
# variable name for each column. Lastly, we can create the model by simply
# adding the terms together.

from brainstat.stats.terms import Term

term_intercept = Term(1, names="intercept")
term_age = Term(age, "age")
term_iq = Term(iq, "iq")
model = term_intercept + term_age + term_iq

###################################################################
# We can also add interaction effects to the model by multiplying terms.

model_interaction = term_intercept + term_age + term_iq + term_age * term_iq

###################################################################
# Now, lets imagine we have some cortical marker (e.g. cortical thickness) for
# each subject and we want to evaluate whether this marker changes with age
# whilst correcting for effects of sex and age-sex interactions.

from brainstat.stats.SLM import SLM
def dummy_test(infile, expfile):
    """Run peak_clus on pickled input and compare peaks, clusters and cluster
    ids against pickled expected output.

    Parameters
    ----------
    infile : str
        Path to a pickle with "t", "tri", "mask", "thresh" and optionally
        "reselspvert", "edg", "k" and "df".
    expfile : str
        Path to a pickle with keys "peak", "clus" and "clusid".

    Raises
    ------
    AssertionError
        If any compared value is not numerically close to the expectation.
    """
    # load input test data (context manager guarantees the file is closed)
    with open(infile, "br") as ifile:
        idic = pickle.load(ifile)

    slm = SLM(Term(1), Term(1))
    slm.t = idic["t"]
    slm.tri = idic["tri"]
    slm.mask = idic["mask"]
    thresh = idic["thresh"]
    reselspvert = idic.get("reselspvert")
    edg = idic.get("edg")

    if "k" in idic:
        slm.k = idic["k"]

    if "df" in idic:
        slm.df = idic["df"]

    # call python function
    P_peak, P_clus, P_clusid = peak_clus(slm, thresh, reselspvert, edg)

    # load expected output data
    with open(expfile, "br") as efile:
        expdic = pickle.load(efile)

    O_peak = expdic["peak"]
    O_clus = expdic["clus"]
    O_clusid = expdic["clusid"]

    testout = []

    def _compare(empirical, expected):
        # Dicts are compared key by key; anything else is compared directly.
        if isinstance(empirical, dict):
            for key in empirical.keys():
                testout.append(np.allclose(empirical[key],
                                           expected[key],
                                           rtol=1e-05,
                                           equal_nan=True))
        else:
            testout.append(np.allclose(empirical, expected,
                                       rtol=1e-05, equal_nan=True))

    # BUG FIX: the original never appended the non-dict P_peak comparison and
    # appended only the LAST per-key P_clus comparison; every comparison is
    # now recorded in testout.
    _compare(P_peak, O_peak)
    _compare(P_clus, O_clus)
    _compare(P_clusid, O_clusid)

    assert all(testout)