# Example #1
def generate(config, group, path):
    """Generate images for an existing image group and save it.

    Args:
        config: config reference accepted by Config.load (a json path
            elsewhere in this file -- confirm).
        group: image-group identifier passed to Random_Images.load_group.
        path: output location handed to generate_images.
    """
    config = Config.load(config)
    # Context manager so the subjects pickle is closed promptly
    # (original left the file handle dangling).
    with open(config.subjects, 'rb') as f:
        subjects = pickle.load(f)

    images = Random_Images.load_group(group)
    images.generate_images(subjects, path)
    # Replaces any previously saved group data.
    images.save_group(overwrite=True)

    # Drop into an interactive shell with everything defined above.
    interact(locals())
# Example #2
def load(group, config, structure):
    """Load an image group (optionally from a named structure file) and inspect it.

    Args:
        group: image-group identifier passed to Random_Images.load_group.
        config: optional config reference; when truthy, the subjects
            pickle is also loaded for the interactive session.
        structure: optional filename forwarded as fname to load_group.
    """
    if config:
        config = Config.load(config)
        # Context manager so the subjects pickle is closed promptly
        # (original left the file handle dangling).
        with open(config.subjects, 'rb') as f:
            subjects = pickle.load(f)

    if structure:
        images = Random_Images.load_group(group, fname=structure)
    else:
        images = Random_Images.load_group(group)
    print(images)
    # Drop into an interactive shell with everything defined above.
    interact(locals())
def load(config):
    """Load subjects, train a clustering model, and inspect it interactively.

    Args:
        config: config reference accepted by Config.load.

    NOTE(review): this shadows the earlier `load(group, config, structure)`
    definition if both live in the same module -- confirm these are
    separate CLI entry points.
    """
    config = Config.load(config)
    # Context manager so the subjects pickle is closed promptly
    # (original left the file handle dangling).
    with open(config.subjects, 'rb') as f:
        subjects = pickle.load(f)
    cluster = Cluster.create(subjects, config)

    logger.info('Training model')
    cluster.train()
    # Kept in locals so the predictions are reachable from interact().
    pred = cluster.predictions
    logger.info('Done training network')

    interact(locals())
# Example #4
def main(path, name, lr):
    """Train a cluster-to-label mapping model and print its scores.

    Args:
        path: directory in which to store the best-weights checkpoint.
        name: filename for the checkpoint (joined onto path).
        lr: learning rate for the SGD optimizer (converted to float).

    NOTE(review): `_config`, `train`, `validate`, `DEC_Updater`,
    `optimizers`, `ModelCheckpoint`, `EarlyStopping`, and `utils` are not
    defined anywhere in this view -- presumably module-level names
    elsewhere in the file; confirm before running.
    """
    config = Config.load(_config)
    print(config.__dict__)
    # Context manager so the subjects pickle is closed promptly
    # (original left the file handle dangling).
    with open(config.subjects, 'rb') as f:
        subjects = pickle.load(f)

    labels = pe.Aggregate.load('mh2')
    truth = pe.Aggregate.load('mh2_gold')
    _labels = labels.subject_labels(), truth.subject_labels()

    # Restrict to subjects that have gold-standard labels, then apply them.
    subjects = subjects.subset(truth.labeled_subjects())
    truth.apply_labels(subjects)

    cluster = Cluster.create(subjects, config)
    cluster.initialize()
    mapping = DEC_Updater(cluster, *_labels, train, validate, 0.95)
    print(mapping.scores)

    optimizer = optimizers.SGD(lr=float(lr))
    mapping.init_model('categorical_crossentropy', optimizer)
    print(mapping.scores)

    weights_fname = os.path.join(path, name)
    # Checkpoint only the best weights by validation loss; stop early
    # after 20 epochs without improvement.
    kwargs = {
        'epochs': 500,
        'batch_size': 256,
        'callbacks': [
            ModelCheckpoint(weights_fname,
                            monitor='val_loss',
                            save_best_only=True,
                            save_weights_only=True,
                            mode='min'),
            EarlyStopping(monitor='val_loss',
                          min_delta=0,
                          patience=20,
                          verbose=0,
                          mode='min')
        ]
    }

    def fit(model, train, val):
        # Closure handed to apply_mapping; forwards the shared training
        # kwargs to the model's fit call.
        model.fit(*train, validation_data=val, **kwargs)

    mapping.apply_mapping(fit)

    print(mapping.cluster_mapping())
    print('scores:', utils.pd_scores(mapping.scores))
def test(config, new):
    """Train a clustering model, then create or load an image group to inspect.

    Args:
        config: config reference accepted by Config.load.
        new: when truthy, build a new Random_Images group from the trained
            cluster; otherwise load existing group 0.
    """
    config = Config.load(config)
    # Context manager so the subjects pickle is closed promptly
    # (original left the file handle dangling).
    with open(config.subjects, 'rb') as f:
        subjects = pickle.load(f)
    cluster = Cluster.create(subjects, config)

    logger.info('Training model')
    cluster.train()
    # Kept in locals so the predictions are reachable from interact().
    pred = cluster.predictions
    logger.info('Done training network')

    from muon.project.images import Random_Images
    if new:
        images = Random_Images.new(cluster)
    else:
        images = Random_Images.load_group(0)
    interact(locals())
# Example #6
def new(config, width, size, permutations, save):
    """Train a clustering model and build a new image group from it.

    Args:
        config: config reference accepted by Config.load.
        width: optional image width override.
        size: optional image-size override (forwarded as image_size).
        permutations: optional permutations override.
        save: when truthy, persist the new group before the interactive
            session.
    """
    config = Config.load(config)
    # Context manager so the subjects pickle is closed promptly
    # (original left the file handle dangling).
    with open(config.subjects, 'rb') as f:
        subjects = pickle.load(f)
    cluster = Cluster.create(subjects, config)

    logger.info('Training model')
    cluster.train()
    logger.info('Done training network')

    # Only forward overrides the caller actually supplied.
    kwargs = {}
    if width:
        kwargs['width'] = width
    if size:
        kwargs['image_size'] = size
    if permutations:
        kwargs['permutations'] = permutations

    images = Random_Images.new(cluster, **kwargs)

    if save:
        images.save_group()

    interact(locals())
from muon.deep_clustering.clustering import Config, Cluster
from muon.utils.subjects import Subjects
from muon.deep_clustering.mapping import Mapping

import os
import pickle
import math
import logging
import numpy as np
import pandas as pd
import csv
logger = logging.getLogger(__name__)

# Interactive bootstrap script: load config and subjects, restrict to the
# subjects that have aggregate labels, then build and initialize a cluster.
config = 'mnt/dec/dec_no_labels/config_jupyter.json'
config = Config.load(config)
print(config.__dict__)
# Context manager so the subjects pickle is closed promptly
# (original left the file handle dangling).
with open(config.subjects, 'rb') as f:
    subjects = pickle.load(f)

import muon.project.parse_export as pe
agg = pe.Aggregate.load('mh2')

# Keep only the subjects present in the aggregate export, then label them.
_s = list(agg.data['subjects'].keys())
subjects2 = subjects.subset(_s)
agg.apply_labels(subjects2)
cluster = Cluster.create(subjects2, config)
print('Initializing model')
cluster.initialize()