Example #1
import numpy as np
from sklearn.feature_selection import SelectKBest
import parsimony.functions.nesterov.tv as tv_helper
from parsimony.estimators import LogisticRegressionL1L2TV


def mapper(key, output_collector):
    import mapreduce as GLOBAL  # access to global variables:
    # GLOBAL.DATA, GLOBAL.STRUCTURE, GLOBAL.A
    # GLOBAL.DATA ::= {"X": [Xtrain, Xtest], "y": [ytrain, ytest]}
    # key: list of parameters
    Xtr = GLOBAL.DATA_RESAMPLED["X"][0]
    Xte = GLOBAL.DATA_RESAMPLED["X"][1]
    ytr = GLOBAL.DATA_RESAMPLED["y"][0]
    yte = GLOBAL.DATA_RESAMPLED["y"][1]
    print(key, "Data shape:", Xtr.shape, Xte.shape, ytr.shape, yte.shape)
    penalty_start = GLOBAL.CONFIG["penalty_start"]
    class_weight = "auto"  # unbiased
    # key ::= (alpha, l1_ratio, l2_ratio, tv_ratio, k)
    alpha = float(key[0])
    l1 = alpha * float(key[1])
    l2 = alpha * float(key[2])
    tv = alpha * float(key[3])
    k = key[4]
    print("l1:%f, l2:%f, tv:%f, k:%i" % (l1, l2, tv, k))
    if k != -1:
        # Univariate filtering: keep the k best features (ANOVA F-test),
        # excluding the first penalty_start unpenalized covariates.
        k = int(k)
        aov = SelectKBest(k=k)
        aov.fit(Xtr[:, penalty_start:], ytr.ravel())
        support = aov.get_support()
        mask = GLOBAL.mask != 0
        mask[mask] = support
        A, _ = tv_helper.nesterov_linear_operator_from_mesh(
            GLOBAL.mesh_coord, GLOBAL.mesh_triangles, mask)
        Xtr_r = np.hstack([Xtr[:, :penalty_start],
                           Xtr[:, penalty_start:][:, support]])
        Xte_r = np.hstack([Xte[:, :penalty_start],
                           Xte[:, penalty_start:][:, support]])
    else:
        mask = np.ones(Xtr.shape[0], dtype=bool)
        Xtr_r = Xtr
        Xte_r = Xte
        A = GLOBAL.A
    mod = LogisticRegressionL1L2TV(l1,
                                   l2,
                                   tv,
                                   A,
                                   penalty_start=penalty_start,
                                   class_weight=class_weight)
    mod.fit(Xtr_r, ytr)
    y_pred = mod.predict(Xte_r)
    proba_pred = mod.predict_probability(Xte_r)
    ret = dict(y_pred=y_pred,
               y_true=yte,
               beta=mod.beta,
               mask=mask,
               proba_pred=proba_pred)
    if output_collector:
        output_collector.collect(key, ret)
    else:
        return ret
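
Note how the penalty weights are derived from the parameter key: the first element is a global strength alpha, the next three are the l1/l2/TV ratios it scales, and the last selects k for the optional univariate filter. Below is a minimal, self-contained sketch of that decoding; the key layout is inferred from the unpacking above, and the helper name and example key are hypothetical.

def decode_key(key):
    # key ::= (alpha, l1_ratio, l2_ratio, tv_ratio, k); inferred layout
    alpha = float(key[0])
    l1 = alpha * float(key[1])
    l2 = alpha * float(key[2])
    tv = alpha * float(key[3])
    k = int(key[4])  # k == -1 disables the SelectKBest filtering
    return l1, l2, tv, k

print(decode_key((0.01, 0.1, 0.1, 0.8, -1)))  # -> (0.001, 0.001, 0.008, -1)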
Example #2
def load_globals(config):
    import mapreduce as GLOBAL  # access to global variables
    import numpy as np
    import brainomics.mesh_processing as mesh_utils
    import parsimony.functions.nesterov.tv as tv_helper
    GLOBAL.DATA = GLOBAL.load_data(config["data"])
    mesh_coord, mesh_triangles = mesh_utils.mesh_arrays(
        config["structure"]["mesh"])
    mask = np.load(config["structure"]["mask"])
    GLOBAL.mesh_coord, GLOBAL.mesh_triangles, GLOBAL.mask = \
        mesh_coord, mesh_triangles, mask
    A, _ = tv_helper.nesterov_linear_operator_from_mesh(
        GLOBAL.mesh_coord, GLOBAL.mesh_triangles, GLOBAL.mask)
    GLOBAL.A = A
    GLOBAL.CONFIG = config
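
For context, load_globals() only reads a few config keys, and mapper() additionally reads config["penalty_start"]. The hypothetical config below illustrates the expected layout; all paths and values are placeholders, and the layout of the "data" entry passed to GLOBAL.load_data() is an assumption.

config = dict(
    data={"X": "/path/to/X.npy", "y": "/path/to/y.npy"},  # assumed layout
    structure=dict(
        mesh="/path/to/lrh.pial.gii",  # mesh geometry read by mesh_arrays()
        mask="/path/to/mask.npy",      # node-level mask loaded with np.load()
    ),
    penalty_start=3,  # number of unpenalized covariates (assumed value)
)
# load_globals(config) then fills GLOBAL.DATA, GLOBAL.mesh_coord,
# GLOBAL.mesh_triangles, GLOBAL.mask, GLOBAL.A and GLOBAL.CONFIG.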
Example #3
import os

BASE_PATH = "/neurospin/brainomics/2013_adni/"
TEMPLATE_PATH = os.path.join(BASE_PATH, "freesurfer_template")
OUTPUT = os.path.join(BASE_PATH, "MCIc-CTL-FS")

import numpy as np
import brainomics.mesh_processing as mesh_utils
mesh_coord, mesh_triangles = mesh_utils.mesh_arrays(
    os.path.join(TEMPLATE_PATH, "lrh.pial.gii"))

# params

mask = np.load(os.path.join(OUTPUT, "mask.npy"))

import parsimony.functions.nesterov.tv as tv_helper
A, _ = tv_helper.nesterov_linear_operator_from_mesh(mesh_coord,
                                                    mesh_triangles,
                                                    mask=mask)
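
# For reference, the scratch code in the string below tallies each node's
# arity from a precomputed nodes_with_edges list. A minimal sketch of the
# same count computed directly from mesh_triangles, assuming it is an
# (n_triangles, 3) integer array of node indices (illustration only):
def node_arity(triangles, n_nodes):
    neighbors = [set() for _ in range(n_nodes)]
    for i, j, k in triangles:
        neighbors[i].update((j, k))
        neighbors[j].update((i, k))
        neighbors[k].update((i, j))
    return np.array([len(s) for s in neighbors])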
"""
# count the neighbors (arity) of each node
n_neighbors = np.array([len(n) for n in nodes_with_edges])
print(np.sum(n_neighbors))
print(np.sum(n_neighbors) / float(len(nodes_with_edges)))
print([[n, np.sum(n_neighbors == n)] for n in np.unique(n_neighbors)])
# 983040
# 2.99996337935
# [[0, 264], [1, 992], [2, 22115], [3, 281155], [4, 21724], [5, 1147], [6, 287]]

# count the number of times each node appears in an edge
count = np.zeros(len(nodes_with_edges))
for n in nodes_with_edges:
    for v in n: