Example #1
import json
import os
import sys
from os.path import join

import numpy as np
import pandas as pd

dir_name = os.path.dirname(os.path.realpath(sys.argv[0]))


import matplotlib.pyplot as plt
import seaborn.apionly as sns  # removed in seaborn >= 0.9; modern code uses "import seaborn as sns"

from modl.utils.system import get_output_dir

idx = pd.IndexSlice

run_id = 23
run_dir = join(get_output_dir(), 'multi_decompose_images', str(run_id), 'run')
analysis_dir = join(get_output_dir(), 'multi_decompose_images', str(run_id), 'analysis')
if not os.path.exists(analysis_dir):
    os.makedirs(analysis_dir)

data = []
for this_dir in os.listdir(run_dir):
    this_dir = join(run_dir, this_dir)
    try:
        config = json.load(open(join(this_dir, 'config.json'), 'r'))
        info = json.load(open(join(this_dir, 'info.json'), 'r'))
    except FileNotFoundError:
        print('Skipping %s' % this_dir)
        continue
    method = config['method']
    step_size = config['step_size']
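
The loop above is cut off. A hedged sketch of the usual continuation (the 'time' and 'score' keys of info.json are assumptions, not confirmed by the excerpt):

    # Inside the loop: keep one flat record per run.
    data.append(dict(method=method, step_size=step_size,
                     time=info.get('time'), score=info.get('score')))

# After the loop: assemble a DataFrame for the analysis step.
df = pd.DataFrame(data)
df.to_csv(join(analysis_dir, 'runs.csv'))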
Example #2
import json
import os
import sys
from os.path import join

import numpy as np
import pandas as pd
from joblib import dump

dir_name = os.path.dirname(os.path.realpath(sys.argv[0]))

import matplotlib.pyplot as plt
import seaborn.apionly as sns

from modl.utils.system import get_output_dir

idx = pd.IndexSlice

run_id = 14
run_dir = join(get_output_dir(), 'multi_decompose_fmri', str(run_id), 'run')
analysis_dir = join(get_output_dir(), 'multi_decompose_fmri', str(run_id),
                    'analysis')
if not os.path.exists(analysis_dir):
    os.makedirs(analysis_dir)

data = []
for this_dir in os.listdir(run_dir):
    this_dir = join(run_dir, this_dir)
    try:
        config = json.load(open(join(this_dir, 'config.json'), 'r'))
        info = json.load(open(join(this_dir, 'info.json'), 'r'))
    except FileNotFoundError:
        print('Skipping %s' % this_dir)
        continue
    method = config['method']
Example #3
import os
import sys
from os import path
from os.path import join

import numpy as np
from sacred import Experiment
from sacred.observers import FileStorageObserver
# sklearn.externals.joblib was removed in scikit-learn 0.23; modern code
# imports Parallel and delayed from joblib directly.
from sklearn.externals.joblib import Parallel
from sklearn.externals.joblib import delayed
from sklearn.utils import check_random_state

from modl.utils.system import get_output_dir

# Add examples to known modules
sys.path.append(
    path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))
from exps.exp_decompose_fmri import exp as single_exp

exp = Experiment('multi_decompose_fmri')
basedir = join(get_output_dir(), 'multi_decompose_fmri')
if not os.path.exists(basedir):
    os.makedirs(basedir)
exp.observers.append(FileStorageObserver.create(basedir=basedir))


@exp.config
def config():
    n_jobs = 2
    n_seeds = 1
    seed = 1


@single_exp.config
def config():
    n_components = 70
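
The excerpt stops at the config overrides. A hedged sketch of the driver this setup implies (run_single is a hypothetical helper; Experiment.run, automain, Parallel and delayed are real sacred/joblib APIs):

def run_single(config_updates):
    # Launch one run of the inner experiment with overridden config.
    single_exp.run(config_updates=config_updates)


@exp.automain
def run(n_jobs, n_seeds, _seed):
    # Draw one reproducible seed per repetition and fan the runs out.
    seeds = check_random_state(_seed).randint(0, 2 ** 32 - 1, size=n_seeds)
    Parallel(n_jobs=n_jobs)(delayed(run_single)(dict(seed=int(s)))
                            for s in seeds)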
Example #4
# License: BSD
import os
from os.path import join

import matplotlib.pyplot as plt
from sacred import Experiment
from sacred.observers import FileStorageObserver

from modl.datasets.image import load_image
from modl.decomposition.image import ImageDictFact, DictionaryScorer
from modl.feature_extraction.image import LazyCleanPatchExtractor
from modl.plotting.image import plot_patches
from modl.utils.system import get_output_dir

exp = Experiment('decompose_images')
base_artifact_dir = join(get_output_dir(), 'decompose_images')
exp.observers.append(FileStorageObserver.create(basedir=base_artifact_dir))


@exp.config
def config():
    batch_size = 400
    learning_rate = 0.92
    reduction = 10
    alpha = 0.08
    n_epochs = 12
    n_components = 100
    test_size = 4000
    max_patches = 10000
    patch_size = (32, 32)
    n_threads = 2
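
The experiment body is not shown. A hedged sketch of how the config would feed the imported estimator; the keyword names are guessed from the config keys and from the analogous fMRIDictFact call in Example #9, so treat the exact ImageDictFact signature as an assumption:

# Hypothetical, signature assumed: build the online dictionary learner from
# the config values, fit it on an array of image patches, and plot the atoms.
dict_fact = ImageDictFact(n_components=n_components,
                          alpha=alpha,
                          batch_size=batch_size,
                          learning_rate=learning_rate,
                          reduction=reduction,
                          n_epochs=n_epochs,
                          n_threads=n_threads)
dict_fact.fit(patches)  # patches: array (n_patches, *patch_size), assumed
plot_patches(plt.figure(), dict_fact.components_)  # helper imported above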
Example #5
import os
from os.path import join

from cogspaces.datasets.utils import fetch_mask
from hcp_builder.dataset import fetch_hcp, fetch_hcp_timeseries
from modl.input_data.fmri.rest import create_raw_rest_data
from modl.utils.system import get_output_dir

smoothing_fwhm = 4
n_jobs = 20

imgs_list = fetch_hcp_timeseries(None, data_type='rest',
                                 n_subjects=None, subjects=None,
                                 on_disk=True)

root = '/storage/store/data/HCP900'
mask_img = fetch_mask()['icbm_gm']

artifact_dir = join(get_output_dir(), 'unmasked', 'hcp_icbm_gm')
if not os.path.exists(artifact_dir):
    os.makedirs(artifact_dir)

create_raw_rest_data(imgs_list,
                     root=root,
                     raw_dir=artifact_dir,
                     overwrite=False,
                     mock=False,
                     masker_params=dict(smoothing_fwhm=smoothing_fwhm,
                                        detrend=True,
                                        standardize=True,
                                        mask_img=mask_img),
                     memory=None,
                     n_jobs=n_jobs)
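
The raw_dir written here has the layout that get_raw_rest_data (from modl.input_data.fmri.rest, see Example #9) reads back. A minimal usage sketch:

# Recover the fitted masker and the table of unmasked records later on.
masker, data = get_raw_rest_data(artifact_dir)
filenames = data['filename'].values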
Example #6
import os
import sys
from os import path
from os.path import join

import numpy as np
from sacred import Experiment
from sacred.observers import FileStorageObserver
from sklearn.externals.joblib import Parallel
from sklearn.externals.joblib import delayed
from sklearn.utils import check_random_state

from modl.utils.system import get_output_dir

# Add examples to known modules
sys.path.append(
    path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))
from exps.exp_decompose_images import exp as single_exp

exp = Experiment('multi_decompose_images')
basedir = join(get_output_dir(), 'multi_decompose_images')
if not os.path.exists(basedir):
    os.makedirs(basedir)
exp.observers.append(FileStorageObserver.create(basedir=basedir))


@exp.config
def config():
    n_jobs = 15
    n_seeds = 1
    seed = 1


@single_exp.config
def config():
    batch_size = 200
Example #7
import os
from os.path import join

from sklearn.externals.joblib import Memory

# fetch_adhd here is modl's ADHD loader (import path assumed), not nilearn's
# fetch_adhd, since the returned object exposes .rest, .root and .mask.
from modl.datasets import fetch_adhd
from modl.input_data.fmri.rest import create_raw_rest_data
from modl.utils.system import get_cache_dirs, get_output_dir

smoothing_fwhm = 6
n_jobs = 20

dataset = fetch_adhd()

# 'cachedir' is the legacy joblib keyword; newer joblib uses 'location'.
memory = Memory(cachedir=get_cache_dirs()[0])
imgs_list = dataset.rest
root = dataset.root
mask_img = dataset.mask

artifact_dir = join(get_output_dir(), 'unmasked', 'adhd_6')
if not os.path.exists(artifact_dir):
    os.makedirs(artifact_dir)

create_raw_rest_data(imgs_list,
                     root=root,
                     raw_dir=artifact_dir,
                     overwrite=False,
                     mock=False,
                     masker_params=dict(smoothing_fwhm=smoothing_fwhm,
                                        detrend=True,
                                        standardize=True,
                                        mask_img=mask_img),
                     memory=memory,
                     n_jobs=n_jobs)
Example #8
import os
from os.path import join

from sklearn.externals.joblib import Memory

from hcp_builder.dataset import fetch_hcp

from modl.input_data.fmri.rest import create_raw_rest_data
from modl.utils.system import get_cache_dirs, get_output_dir

smoothing_fwhm = 4
n_jobs = 3

dataset = fetch_hcp()

memory = Memory(cachedir=get_cache_dirs()[0])
imgs_list = dataset.rest
root = dataset.root
mask_img = dataset.mask

artifact_dir = join(get_output_dir(), 'unmasked', 'hcp')
if not os.path.exists(artifact_dir):
    os.makedirs(artifact_dir)

create_raw_rest_data(imgs_list,
                     root=root,
                     raw_dir=artifact_dir,
                     overwrite=False,
                     mock=False,
                     masker_params=dict(smoothing_fwhm=smoothing_fwhm,
                                        detrend=True,
                                        standardize=True,
                                        mask_img=mask_img),
                     memory=memory,
                     n_jobs=n_jobs)
Example #9
# Captured function: sacred injects the config values and the _run object.
# (The Experiment it belongs to is defined further down in this excerpt.)
def compute_components(n_components, batch_size, learning_rate, method,
                       reduction, alpha, step_size, n_jobs, n_epochs, verbose,
                       source, _run):
    basedir = join(_run.observers[0].basedir, str(_run._id))
    artifact_dir = join(basedir, 'artifacts')
    if not os.path.exists(artifact_dir):
        os.makedirs(artifact_dir)

    if source == 'hcp':
        # Hack to recover data from TSP
        train_size = None
        smoothing_fwhm = 3
        test_size = 2
        data_dir = get_data_dirs()[0]
        mask = fetch_hcp_mask()
        masker = MultiRawMasker(mask_img=mask,
                                smoothing_fwhm=smoothing_fwhm,
                                detrend=True,
                                standardize=True)
        mapping = json.load(
            open(join(data_dir, 'HCP_unmasked/mapping.json'), 'r'))
        data = sorted(list(mapping.values()))
        data = list(map(lambda x: join(data_dir, x), data))
        data = pd.DataFrame(data, columns=['filename'])
    else:
        smoothing_fwhm = 6
        train_size = 4
        test_size = 4
        raw_res_dir = join(get_output_dir(), 'unmasked', source)
        try:
            masker, data = get_raw_rest_data(raw_res_dir)
        except ValueError:  # On local machine:
            raw_res_dir = join(get_output_dir(), 'unmask', source)
            masker, data = get_raw_rest_data(raw_res_dir)

    # Hold out a few records; the held-out images feed the scorer below.
    train_imgs, test_imgs = train_test_split(data,
                                             test_size=test_size,
                                             random_state=0,
                                             train_size=train_size)
    train_imgs = train_imgs['filename'].values
    test_imgs = test_imgs['filename'].values

    cb = rfMRIDictionaryScorer(test_imgs, info=_run.info)
    dict_fact = fMRIDictFact(
        method=method,
        mask=masker,
        verbose=verbose,
        n_epochs=n_epochs,
        n_jobs=n_jobs,
        random_state=1,
        n_components=n_components,
        smoothing_fwhm=smoothing_fwhm,
        learning_rate=learning_rate,
        batch_size=batch_size,
        reduction=reduction,
        step_size=step_size,
        alpha=alpha,
        callback=cb,
    )
    dict_fact.fit(train_imgs)
    dict_fact.components_img_.to_filename(
        join(artifact_dir, 'components.nii.gz'))
    fig = plt.figure()
    display_maps(fig, dict_fact.components_img_)
    plt.savefig(join(artifact_dir, 'components.png'))

    fig, ax = plt.subplots(1, 1)
    ax.plot(cb.cpu_time, cb.score, marker='o')
    _run.info['time'] = cb.cpu_time
    _run.info['score'] = cb.score
    _run.info['iter'] = cb.iter
    plt.savefig(join(artifact_dir, 'score.png'))
# Module-level setup; in the full script this precedes compute_components.
import json
import os
from os.path import join

import matplotlib.pyplot as plt
import pandas as pd

from sklearn.model_selection import train_test_split

from sacred import Experiment
from sacred.observers import FileStorageObserver

from modl.input_data.fmri.rest import get_raw_rest_data
from modl.decomposition.fmri import fMRIDictFact, rfMRIDictionaryScorer
from modl.plotting.fmri import display_maps
from modl.utils.system import get_output_dir
# Import path assumed for the nifti monkey-patch helper; the excerpt also
# omits imports for get_data_dirs, fetch_hcp_mask and MultiRawMasker used
# in the HCP branch above.
from modl.input_data.fmri.monkey import monkey_patch_nifti_image

monkey_patch_nifti_image()

exp = Experiment('decompose_fmri')
base_artifact_dir = join(get_output_dir(), 'decompose_fmri')
exp.observers.append(FileStorageObserver.create(basedir=base_artifact_dir))


@exp.config
def config():
    n_components = 70
    batch_size = 100
    learning_rate = 0.92
    method = 'dictionary only'
    reduction = 1
    alpha = 1e-4
    n_epochs = 100
    verbose = 30
    n_jobs = 5
    step_size = 1e-5
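
The excerpt is shuffled: in the full script the module-level setup above comes first and compute_components is attached to the experiment. A hedged sketch of the omitted glue (decorator placement assumed; exp.main and exp.run_commandline are real sacred APIs):

# Hypothetical: register the captured function as the experiment's main
# and launch it from the command line.
main = exp.main(compute_components)

if __name__ == '__main__':
    exp.run_commandline()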