Example #1
0
    def __init__(self, pid, mem, cpu, length=100):
        """
        Set up state for stat processing.

        Parameters
        ----------
        pid: object
            Process identifier to monitor (presumably an OS pid — confirm
            against callers).
        mem: object
            Memory tracking target/value — semantics set by callers.
        cpu: object
            CPU tracking target/value — semantics set by callers.
        length: int, optional
            History length kept by the processor (default 100).
        """
        # Assign arguments explicitly instead of
        # self.__dict__.update(locals()), which would also store `self`
        # into the instance dict (a self.self reference cycle).
        self.pid = pid
        self.mem = mem
        self.cpu = cpu
        self.length = length
        self.cpus = []
        self.mems = []
        self.br = False
        super(StatProcessor, self).__init__()

        self.logger = logger.setup_custom_logger("stats", logging.DEBUG)
Example #2
0
    def __init__(self, pid, mem, cpu, length=100):
        """
        Initialize the stat processor.

        Parameters
        ----------
        pid: object
            Process identifier to watch (presumably an OS pid — confirm).
        mem: object
            Memory tracking target/value; meaning defined by callers.
        cpu: object
            CPU tracking target/value; meaning defined by callers.
        length: int, optional
            History length kept by the processor (default 100).
        """
        # Explicit assignments rather than self.__dict__.update(locals()):
        # locals() includes `self`, so the update would create a
        # self-referential self.self attribute.
        self.pid = pid
        self.mem = mem
        self.cpu = cpu
        self.length = length
        self.cpus = []
        self.mems = []
        self.br = False
        super(StatProcessor, self).__init__()

        self.logger = logger.setup_custom_logger("stats", logging.DEBUG)
Example #3
0
    def __init__(self,
                 experiment,
                 out_path,
                 processing_flag,
                 mem,
                 cpu,
                 last_processed,
                 dbdescr=None,
                 job_id=None):
        """
        Initialize processor state, the metadata dict, and loggers.

        Parameters
        ----------
        experiment: object
            Experiment object; must provide `name` and
            `results_of_interest` attributes.
        out_path: str
            Directory where "model.log" is written.
        processing_flag: object
            Stored on the instance; semantics set by callers.
        mem: object
            Memory monitoring handle/value — TODO confirm semantics.
        cpu: object
            CPU monitoring handle/value — TODO confirm semantics.
        last_processed: object
            Marker of last processing (stored on the instance).
        dbdescr: optional
            Database description; stored, not used in this method.
        job_id: optional
            Job identifier; stored, not used in this method.
        """
        # Explicit assignments instead of self.__dict__.update(locals()),
        # which would also create a self.self reference cycle.
        self.experiment = experiment
        self.out_path = out_path
        self.processing_flag = processing_flag
        self.mem = mem
        self.cpu = cpu
        self.last_processed = last_processed
        self.dbdescr = dbdescr
        self.job_id = job_id

        self.on = False
        self.channels = []
        self.collect_channels = False
        self.channel_groups = {}

        # Metadata document; written out via write_json() below and kept
        # in sync with the log stream through MetaLogHandler.
        self.d = {
            "name": experiment.name,
            "yaml": None,
            # No backslash needed: the expression continues inside braces.
            "results_of_interest": experiment.results_of_interest +
                ["cpu", "mem"],
            "stats": {
                "status": "STARTING",
                "pid": None,
                "user": None,
                "host": socket.gethostname(),
                "create_time": None,
                "last_heard": None,
                "port": None
                },
            "processing": False,
            "last_processed": "Never",
            "hyperparams": None,
            "logs": {
                "cpu": {"cpu": []},
                "mem": {"mem": []}
                },
            "log_stream": ""
            }

        self.logger = logger.setup_custom_logger("pl2mind", logging.DEBUG)
        log_file = path.join(out_path, "model.log")
        formatter = logging.Formatter(fmt="%(asctime)s:%(levelname)s:"
                                      "%(module)s:%(message)s")

        # File log, rewritten each run (mode="w").
        fh = logging.FileHandler(log_file, mode="w")
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(formatter)
        self.logger.addHandler(fh)

        # Stream handler backed by MetaLogHandler(self.d) — presumably
        # mirrors log lines into self.d ("log_stream"); confirm in
        # MetaLogHandler.
        h = logging.StreamHandler(MetaLogHandler(self.d))
        h.setLevel(logging.DEBUG)
        h.setFormatter(formatter)
        self.logger.addHandler(h)

        self.write_json()
Example #4
0
    def __init__(self, experiment, checkpoint, ep, flag, last_processed):
        """
        Set up the model processor.

        Parameters
        ----------
        experiment: object
            Experiment object (stored on the instance).
        checkpoint: str
            Path to the checkpoint file; its directory becomes out_path.
        ep: object
            Stored on the instance — semantics set by callers (TODO confirm).
        flag: object
            Shared processing flag; semantics set by callers.
        last_processed: object
            Marker of last processing (stored on the instance).
        """
        # Explicit assignments instead of self.__dict__.update(locals()),
        # which would also store a self.self reference cycle.
        self.experiment = experiment
        self.checkpoint = checkpoint
        self.ep = ep
        self.flag = flag
        self.last_processed = last_processed
        self.socket = None

        # Derive the output directory and "<stem>_best.pkl" path from the
        # checkpoint path. Note: splitting on "/" assumes POSIX-style paths.
        self.out_path = "/".join(checkpoint.split("/")[:-1])
        self.best_checkpoint = path.join(
            self.out_path,
            checkpoint.split("/")[-1].split(".")[0] + "_best.pkl")
        self.persistent = False
        super(ModelProcessor, self).__init__()

        self.logger = logger.setup_custom_logger("processor", logging.DEBUG)
Example #5
0
    def __init__(self, experiment, checkpoint, ep,
                 flag, last_processed):
        """
        Initialize the model processor.

        Parameters
        ----------
        experiment: object
            Experiment object (stored on the instance).
        checkpoint: str
            Checkpoint file path; its directory becomes out_path.
        ep: object
            Stored on the instance; semantics set by callers (TODO confirm).
        flag: object
            Shared processing flag; semantics set by callers.
        last_processed: object
            Marker of last processing (stored on the instance).
        """
        # Assign arguments explicitly rather than
        # self.__dict__.update(locals()): locals() includes `self`, so the
        # update would create a self-referential self.self attribute.
        self.experiment = experiment
        self.checkpoint = checkpoint
        self.ep = ep
        self.flag = flag
        self.last_processed = last_processed
        self.socket = None

        # Output directory and "<stem>_best.pkl" derived from the
        # checkpoint path (splitting on "/" assumes POSIX-style paths).
        self.out_path = "/".join(checkpoint.split("/")[:-1])
        self.best_checkpoint = path.join(
            self.out_path,
            checkpoint.split("/")[-1].split(".")[0] + "_best.pkl")
        self.persistent = False
        super(ModelProcessor, self).__init__()

        self.logger = logger.setup_custom_logger("processor", logging.DEBUG)
Example #6
0
    def __init__(self, experiment, out_path, processing_flag, mem, cpu,
                 last_processed, dbdescr=None, job_id=None):
        """
        Initialize processor state, the metadata dict, and loggers.

        Parameters
        ----------
        experiment: object
            Experiment object; must provide `name` and
            `results_of_interest` attributes.
        out_path: str
            Directory where "model.log" is written.
        processing_flag: object
            Stored on the instance; semantics set by callers.
        mem: object
            Memory monitoring handle/value — TODO confirm semantics.
        cpu: object
            CPU monitoring handle/value — TODO confirm semantics.
        last_processed: object
            Marker of last processing (stored on the instance).
        dbdescr: optional
            Database description; stored, not used in this method.
        job_id: optional
            Job identifier; stored, not used in this method.
        """
        # Explicit assignments instead of self.__dict__.update(locals()),
        # which would also create a self.self reference cycle.
        self.experiment = experiment
        self.out_path = out_path
        self.processing_flag = processing_flag
        self.mem = mem
        self.cpu = cpu
        self.last_processed = last_processed
        self.dbdescr = dbdescr
        self.job_id = job_id

        self.on = False
        self.channels = []
        self.collect_channels = False
        self.channel_groups = {}

        # Metadata document; written out via write_json() below and kept
        # in sync with the log stream through MetaLogHandler.
        self.d = {
            "name": experiment.name,
            "yaml": None,
            # No backslash needed: the expression continues inside braces.
            "results_of_interest": experiment.results_of_interest +
                ["cpu", "mem"],
            "stats": {
                "status": "STARTING",
                "pid": None,
                "user": None,
                "host": socket.gethostname(),
                "create_time": None,
                "last_heard": None,
                "port": None
                },
            "processing": False,
            "last_processed": "Never",
            "hyperparams": None,
            "logs": {
                "cpu": {"cpu": []},
                "mem": {"mem": []}
                },
            "log_stream": ""
            }

        self.logger = logger.setup_custom_logger("pl2mind", logging.DEBUG)
        log_file = path.join(out_path, "model.log")
        formatter = logging.Formatter(fmt="%(asctime)s:%(levelname)s:"
                                      "%(module)s:%(message)s")

        # File log, rewritten each run (mode="w").
        fh = logging.FileHandler(log_file, mode="w")
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(formatter)
        self.logger.addHandler(fh)

        # Stream handler backed by MetaLogHandler(self.d) — presumably
        # mirrors log lines into self.d ("log_stream"); confirm in
        # MetaLogHandler.
        h = logging.StreamHandler(MetaLogHandler(self.d))
        h.setLevel(logging.DEBUG)
        h.setFormatter(formatter)
        self.logger.addHandler(h)

        self.write_json()
Example #7
0
import multiprocessing as mp
import networkx as nx
import numpy as np
import os
from os import path
import pickle

from pl2mind.analysis import feature_extraction as fe
from pl2mind.datasets import MRI
from pl2mind import logger
from pl2mind.tools import simtb_viewer
from pylearn2.datasets.transformer_dataset import TransformerDataset
from pylearn2.utils import serial


logger = logger.setup_custom_logger("pl2mind", logging.ERROR)

def save_simtb_montage(dataset, features, out_file, feature_dict,
                       target_stat=None, target_value=None):
    """
    Build a weights view from the dataset and render it as a simtb
    montage written to out_file.
    """
    logger.info("Saving simtb montage")
    view = dataset.get_weights_view(features)
    simtb_viewer.montage(view,
                         out_file=out_file,
                         feature_dict=feature_dict,
                         target_stat=target_stat,
                         target_value=target_value)

def save_helper(args):
Example #8
0
__licence__ = "3-clause BSD"
__email__ = "*****@*****.**"
__maintainer__ = "Alvaro Ulloa"

import argparse
import logging
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np

from pl2mind import logger

import scipy.signal as ss

logger = logger.setup_custom_logger("pl2mind", logging.WARNING)


def qea(im):
    """
    Quasi-eigen approximation function.

    Parameters
    ----------
    im: array_like
        1d vector that contains a time series

    Returns
    -------
    ia: array_like
        instantaneous amplitude
Example #9
0
__maintainer__ = "Alvaro Ulloa"


import argparse
import logging
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np

from pl2mind import logger

import scipy.signal as ss


logger = logger.setup_custom_logger("pl2mind", logging.WARNING)

def qea(im):
    """
    Quasi-eigen approximation function.

    Parameters
    ----------
    im: array_like
        1d vector that contains a time series

    Returns
    -------
    ia: array_like
        instantaneous amplitude
    ip: array_like
Example #10
0
from os import path
import pickle

from pl2mind import logger
from pylearn2.utils import serial

from random import shuffle
import re
from scipy import io
from scipy.stats import kurtosis
from scipy.stats import skew
import sys
from sys import stdout


logger = logger.setup_custom_logger("pl2mind", logging.ERROR)

def natural_sort(l):
    """
    Sort strings in human ("natural") order.

    Embedded digit runs compare numerically and the rest compares
    case-insensitively, so "a2" sorts before "a10".

    Parameters
    ----------
    l: iterable of str
        Strings to sort.

    Returns
    -------
    list of str
        New sorted list; the input is not modified.
    """
    # Named helpers instead of assigned lambdas (PEP 8 E731).
    def _convert(text):
        return int(text) if text.isdigit() else text.lower()

    def _alphanum_key(key):
        return [_convert(c) for c in re.split('([0-9]+)', key)]

    return sorted(l, key=_alphanum_key)

def save_variance_map(dataset, save_path):
    """
    Compute the per-column standard deviation of dataset.X and save the
    resulting array to save_path with numpy.
    """
    logger.info("Saving variance file")
    np.save(save_path, dataset.X.std(axis=0))

def pull_niftis(source_directory, *args):
    """
    Pull healthy and schizophrenia nitfi files from a source_directory.
    Uses glob to get multiple files.
Example #11
0
import logging
import multiprocessing as mp
from nipy import load_image
from nipy import save_image
import numpy as np
import pickle
from pl2mind import logger
import pprint
import re
from scipy import (reshape, zeros, where, std, argmax, sqrt, ceil, floor, sign,
                   negative, linspace, double, float16)
import subprocess
from sys import stdout


logger = logger.setup_custom_logger("pl2mind", logging.DEBUG)

# These are general names of regions for use elsewhere.
singles = ["Postcentral Gyrus",
           "Cingulate Gyrus",
           "Thalamus",
           "Superior Frontal Gyrus",
           "Pyramis",
           "Caudate",
           "Declive",
           "Cuneus",
           "Ulvula",
           "Medial Frontal Gyrus",
           "Precuneus",
           "Lingual Gyrus",
           "Paracentral Lobule",
Example #12
0
import itertools
import logging
import multiprocessing as mp
from nipy import load_image
from nipy import save_image
import numpy as np
import pickle
from pl2mind import logger
import pprint
import re
from scipy import (reshape, zeros, where, std, argmax, sqrt, ceil, floor, sign,
                   negative, linspace, double, float16)
import subprocess
from sys import stdout

logger = logger.setup_custom_logger("pl2mind", logging.DEBUG)

# These are general names of regions for use elsewhere.
# NOTE(review): "Cingulate Gyrus" appears twice in this list — confirm
# whether the duplicate entry is intentional.
singles = [
    "Postcentral Gyrus", "Cingulate Gyrus", "Thalamus",
    "Superior Frontal Gyrus", "Pyramis", "Caudate", "Declive", "Cuneus",
    "Ulvula", "Medial Frontal Gyrus", "Precuneus", "Lingual Gyrus",
    "Paracentral Lobule", "Semi-Lunar Lobule", "Posterior Cingulate", "Culmen",
    "Cerebellar Tonsil", "Cingulate Gyrus", "Middle Frontal Gyrus",
    "Anterior Cingulate"
]

# Larger functional regions. Not used here, but can be referenced.
SC = [
    "Caudate", "Putamen", "Thalamus", "Caudate Tail", "Caudate Body",
    "Caudate Head"