Example #1
import datetime

import numpy as np
import torch

# Project-local helpers assumed available here: Config, DataSet, FC,
# ToTensor, and run_test (see the sketch after this example).


def main(args):

    config_file = args.config
    test = args.test

    cfg = Config(config_file)

    tr = None
    if test is None:
        tr = DataSet(cfg.tr_data, cfg)
        te = DataSet(cfg.te_data, cfg, sub_sample=1)
        tr0 = DataSet([cfg.tr_data[0]], cfg, sub_sample=1)  # created but unused in this excerpt
        cfg.att = te.sz[1]
    else:
        if test == 'te':
            te = DataSet([cfg.te_data[0]], cfg)
        else:
            te = DataSet([cfg.tr_data[0]], cfg)
        cfg.att = te.sz[1]

    iterations = 10000
    loop = cfg.loop
    print "input attribute", cfg.att, "LR", cfg.lr, 'feature', cfg.feature_len

    n_att = cfg.att
    # n_length = cfg.feature_len
    n_hidden = cfg.nodes[1][-1]
    n_output = cfg.num_output
    hidden0 = ToTensor(np.ones(n_hidden).astype(np.float32))

    mrnn = FC(n_att * cfg.feature_len, cfg.nodes[0], n_output, cfg.lr)

    if test:
        mrnn.load_state_dict(torch.load(cfg.netTest[:-3]))
        tr_loss, tr_median = run_test(mrnn, te, cfg, hidden0)
        for a in range(len(tr_loss)):
            print(a, tr_loss[a], tr_median[a])

        exit(0)

    if cfg.renetFile:
        mrnn.load_state_dict(torch.load(cfg.renetFile[:-3]))

    t00 = datetime.datetime.now()

    T = 0
    T_err = 0
    for a in range(iterations):

        tr_pre_data = tr.prepare(multi=1)
        while tr_pre_data:
            for b in tr_pre_data:
                x = ToTensor(b[0].astype(np.float32))
                y = ToTensor(b[1].astype(np.float32))
                err = mrnn.train(y, x)
                if a % loop == 0 and a > 0:
                    t1 = datetime.datetime.now()
                    # Report iteration, elapsed hours, and mean error since last report.
                    print(a, (t1 - t00).total_seconds() / 3600.0, T_err / T)
                    T_err = 0
                    T = 0
                    torch.save(mrnn.state_dict(), cfg.netFile[:-3])
                T_err += err
                T += 1

            tr_pre_data = tr.get_next()
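Example #1 leans on project-local helpers (`FC`, `ToTensor`) that the excerpt does not define. Below is a minimal sketch of what they might look like, assuming `FC(n_input, hidden_nodes, n_output, lr)` builds a plain fully connected network and its training call does one optimizer step and returns the loss; every name and signature here is inferred from the call sites above, not confirmed by the source.

import numpy as np
import torch
import torch.nn as nn


def ToTensor(array):
    # Hypothetical helper: wrap a NumPy array as a float32 tensor.
    return torch.from_numpy(np.asarray(array, dtype=np.float32))


class FC(nn.Module):
    # Hypothetical stand-in for the project's FC wrapper: an MLP whose
    # train_step() folds an MSE loss and an SGD update into one call
    # (the original code calls this method train()).
    def __init__(self, n_input, hidden_nodes, n_output, lr):
        super().__init__()
        layers, prev = [], n_input
        for width in hidden_nodes:
            layers += [nn.Linear(prev, width), nn.ReLU()]
            prev = width
        layers.append(nn.Linear(prev, n_output))
        self.net = nn.Sequential(*layers)
        self.loss_fn = nn.MSELoss()
        self.opt = torch.optim.SGD(self.parameters(), lr=lr)

    def forward(self, x):
        return self.net(x)

    def train_step(self, y, x):
        # One gradient step; returns the scalar loss, as mrnn.train(y, x)
        # appears to do above.
        self.opt.zero_grad()
        loss = self.loss_fn(self.forward(x), y)
        loss.backward()
        self.opt.step()
        return loss.item()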
Example #2
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import numpy as np
from dataset import DatasetFactory, Config
from dataset.images import Image

Config().verbose = True
Config().dataset_type = 'wrapper_dataset'
ops = ('cropping', )

# val_ind
factory = DatasetFactory()
factory.add_image_type('image', 'label', 'mask', 'bounding_box')
factory.add_dataset(dataset_id='1', dirname='t_data_1', val_ind=[0, 2])
factory.add_dataset(dataset_id='2', dirname='t_data_2')
factory.add_dataset(dataset_id='3', dirname='t_data_3', val_ind=[0])
factory.add_training_operation(*ops)
t_dataset, v_dataset = factory.create()
t_keys = ['1/at1006', '2/at1025', '2/at1029', '3/at1034', '3/at1040']
v_keys = ['1/at1000', '1/at1007', '3/at1033']
assert list(t_dataset.images.keys()) == t_keys
assert list(v_dataset.images.keys()) == v_keys
for im in t_dataset[0]:
    if hasattr(im, 'labels'):
        print(im.labels)
    assert isinstance(im, Image)

Config().dataset_type = 'dataset'
t_dataset, v_dataset = factory.create()
for im in t_dataset[0]:
    assert isinstance(im, Image)
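The key assertions above imply the split rule for `val_ind`: for each dataset, files at the listed indices go to validation and the rest to training (dataset '2', added without `val_ind`, contributes everything to training). A hedged illustration of that rule for dataset '1', using the file names taken from the asserted keys:

files = ['at1000', 'at1006', 'at1007']  # dataset '1', in sorted order
val_ind = [0, 2]
val = [f for i, f in enumerate(files) if i in val_ind]
train = [f for i, f in enumerate(files) if i not in val_ind]
assert val == ['at1000', 'at1007'] and train == ['at1006']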
Example #3
def config():
    _config = dict(key1='val1', key2=dict())
    _config['key2']['subkey1'] = 'val21'
    return Config(_config)
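Example #3 reads like a pytest fixture that wraps a nested dict in a `Config`. A sketch of how a test might consume it, assuming `Config` preserves dict-style access to the wrapped mapping (an assumption; the example only confirms the constructor takes a dict):

def test_nested_value(config):
    # Assumes dict-style lookup on Config; adjust to the real accessor API.
    assert config['key2']['subkey1'] == 'val21'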
Example #4
    # (Excerpt: continuation of a branch that restores settings from a loaded
    # checkpoint; `args`, `script_config`, and `checkpoint` are defined earlier.)
    script_config['validation_batch_size'] = args.validation_batch_size
    script_config['num_workers'] = args.num_workers
    script_config['training_dir'] = args.training_dir
    script_config['validation_dir'] = args.validation_dir
    script_config['output_prefix'] = args.output_prefix
    for key, value in script_config.items():
        setattr(args, key, value)

    print(checkpoint.keys())
    TConfig.load_dict(checkpoint['trainer_config'])
    TConfig.num_epochs = args.num_epochs
    TConfig.model_period = args.model_period
    TConfig.pred_period = args.pred_period
    TConfig.val_period = args.val_period

    DConfig.load_dict(checkpoint['dataset_config'])
    LConfig.load_dict(checkpoint['layers_config'])
else:
    script_config = args.__dict__

print('Script config')
keylen = max([len(key) + 1 for key in script_config.keys()])
for key, value in script_config.items():
    print('    %s %s' % ((key + ':').ljust(keylen), value))

DConfig.verbose = args.verbose
DConfig.aug_prob = args.augmentation_prob
DConfig.crop_shape = args.cropping_shape
DConfig.image_shape = args.input_shape
DConfig.image_suffixes = [args.image_suffix]
DConfig.label_suffixes = [args.label_suffix]
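`TConfig`, `DConfig`, and `LConfig` act here as class-level configuration holders: `load_dict` restores values from the checkpoint, and individual attributes are then overridden from `args`. A minimal sketch of that pattern, assuming `load_dict` simply copies dict entries onto class attributes (inferred from the usage above, not the project's actual code):

class ClassConfig:
    # Hypothetical base: configuration lives in class attributes, so a
    # checkpoint dict can be replayed onto the class itself.
    @classmethod
    def load_dict(cls, d):
        for key, value in d.items():
            setattr(cls, key, value)


class TConfig(ClassConfig):
    num_epochs = 10
    model_period = 1


TConfig.load_dict({'num_epochs': 50, 'model_period': 5})
assert TConfig.num_epochs == 50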
Example #5
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from dataset import Config

Config.show()
assert Config.image_suffixes == ['image']
Config.load_json('input.json')
assert Config.image_suffixes == ['image', 'hello']
Config.save_json('output.json')
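For Example #5's assertions to hold, `input.json` must extend `image_suffixes` from ['image'] to ['image', 'hello']. One file that would satisfy that, assuming `load_json` maps top-level JSON keys onto `Config` attributes (the schema itself is not shown in the source):

import json

# Writes a hypothetical input.json consistent with the assertions above.
with open('input.json', 'w') as f:
    json.dump({'image_suffixes': ['image', 'hello']}, f)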
Example #6
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import numpy as np
import nibabel as nib
from image_processing_3d import calc_bbox3d, resize_bbox3d, crop3d

from dataset import DatasetFactory, Config
from dataset.trees import Tree, TensorTree

ref_obj = nib.load('data/at1000_label.nii.gz')
# Config().dataset_type = 'wrapper_dataset'
Config().verbose = True
t_ops = ('cropping', 'label_normalization')
v_ops = ('cropping', 'label_normalization')
# t_ops = ('cropping', )
# v_ops = ('cropping', )

factory = DatasetFactory()
factory.add_image_type('image', 'hierachical_label', 'mask')
factory.add_dataset(dataset_id='tmc', dirname='data')
factory.add_dataset(dataset_id='kki', dirname='ped_data')
factory.add_training_operation(*t_ops)
factory.add_validation_operation(*v_ops)
t_dataset, v_dataset = factory.create()

mapping1 = {
    'Anterior Lobe': [33, 36, 43, 46, 53, 56],
    'Background': [0],
    'Cerebellum': [