Example #1
def train_model():
    batch_size = 16
    num_epochs = c.ch4_train_epochs
    sz = c.fcn_img_size
    version = 2
    for i in xrange(5):
        data = u.DataH5PyStreamer(os.path.join(c.data_intermediate, 'ch4_256.hdf5'),
                batch_size=batch_size, folds=(5,i))
        input_var = T.tensor4('input')
        label_var = T.tensor4('label')
        net, output, output_det = m.build_fcn_segmenter(input_var,
                (None, 1, sz, sz), version=version)
        params = nn.layers.get_all_params(net['output'], trainable=True)
        lr = theano.shared(nn.utils.floatX(3e-3))
        loss = du.sorenson_dice(output, label_var)
        te_loss = du.sorenson_dice(output_det, label_var)
        te_acc = nn.objectives.binary_accuracy(output_det, label_var).mean()
        updates = nn.updates.adam(loss, params, learning_rate=lr)
        train_fn = theano.function([input_var, label_var], loss, updates=updates)
        test_fn = theano.function([input_var, label_var], te_loss)
        acc_fn = theano.function([input_var, label_var], te_acc)
        pred_fn = theano.function([input_var], output_det)
        hist = u.train_with_hdf5(data, num_epochs=num_epochs,
                train_fn=train_fn, test_fn=test_fn,
                max_per_epoch=-1, use_tqdm=False,
                tr_transform=lambda x: du.segmenter_data_transform(x, rotate=(-180, 180)),
                te_transform=lambda x: du.segmenter_data_transform(x, rotate=None),
                last_layer=net['output'],
                save_params_to=os.path.join(c.params_dir, 'ch4seg_v{}/test_ch4seg_f{}_v{}.npz'\
                        .format(version, i, version)))
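The objective here, du.sorenson_dice, is presumably a soft (differentiable) Sørensen–Dice loss on the predicted masks, which is why the same expression serves as both the training loss and the validation metric. A minimal sketch of such a loss in Theano follows; the smoothing constant and the exact reduction used by du.sorenson_dice are assumptions.

import theano.tensor as T

def soft_dice_loss(pred, target, smooth=1.0):
    # Hypothetical stand-in for du.sorenson_dice: 1 - soft Dice coefficient,
    # averaged over the batch. `smooth` keeps the ratio defined on empty masks.
    pred_f = pred.flatten(2)      # (batch, H*W)
    target_f = target.flatten(2)  # (batch, H*W)
    intersection = T.sum(pred_f * target_f, axis=1)
    dice = (2.0 * intersection + smooth) / (
        T.sum(pred_f, axis=1) + T.sum(target_f, axis=1) + smooth)
    return (1.0 - dice).mean()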
Example #2
def train_model():
    batch_size = 16
    num_epochs = c.ch4_train_epochs
    sz = c.fcn_img_size
    version = 2
    for i in xrange(5):
        data = u.DataH5PyStreamer(os.path.join(c.data_intermediate,
                                               'ch4_256.hdf5'),
                                  batch_size=batch_size,
                                  folds=(5, i))
        input_var = T.tensor4('input')
        label_var = T.tensor4('label')
        net, output, output_det = m.build_fcn_segmenter(input_var,
                                                        (None, 1, sz, sz),
                                                        version=version)
        params = nn.layers.get_all_params(net['output'], trainable=True)
        lr = theano.shared(nn.utils.floatX(3e-3))
        loss = du.sorenson_dice(output, label_var)
        te_loss = du.sorenson_dice(output_det, label_var)
        te_acc = nn.objectives.binary_accuracy(output_det, label_var).mean()
        updates = nn.updates.adam(loss, params, learning_rate=lr)
        train_fn = theano.function([input_var, label_var],
                                   loss,
                                   updates=updates)
        test_fn = theano.function([input_var, label_var], te_loss)
        acc_fn = theano.function([input_var, label_var], te_acc)
        pred_fn = theano.function([input_var], output_det)
        hist = u.train_with_hdf5(data, num_epochs=num_epochs,
                train_fn=train_fn, test_fn=test_fn,
                max_per_epoch=-1, use_tqdm=False,
                tr_transform=lambda x: du.segmenter_data_transform(x, rotate=(-180, 180)),
                te_transform=lambda x: du.segmenter_data_transform(x, rotate=None),
                last_layer=net['output'],
                save_params_to=os.path.join(c.params_dir, 'ch4seg_v{}/test_ch4seg_f{}_v{}.npz'\
                        .format(version, i, version)))
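The folds=(5, i) argument to u.DataH5PyStreamer presumably splits the HDF5 data into 5 folds and holds out fold i for validation. A rough NumPy sketch of that kind of index split is shown below; the streamer's actual shuffling and ordering are assumptions.

import numpy as np

def kfold_indices(n_samples, n_folds, fold, seed=0):
    # Hypothetical illustration of a (n_folds, fold) split: shuffle the sample
    # indices once, hold out chunk `fold`, and train on the remaining chunks.
    rng = np.random.RandomState(seed)
    chunks = np.array_split(rng.permutation(n_samples), n_folds)
    valid_idx = chunks[fold]
    train_idx = np.concatenate([ch for j, ch in enumerate(chunks) if j != fold])
    return train_idx, valid_idx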
Example #3
def train_model():
    batch_size = 8
    version = 2
    total_epochs = c.fcn_train_epochs
    for normpct in [(10, 90), None]:
        stop_times = []
        for i in [0, 1, 2, 3, 4, -1]:
            num_epochs = int(np.mean(stop_times)) if i == -1 else total_epochs
            data = u.DataH5PyStreamer(os.path.join(c.data_intermediate,
                                                   'scd_seg_256.hdf5'),
                                      batch_size=batch_size,
                                      folds=(5, i))
            input_var = T.tensor4('input')
            label_var = T.tensor4('label')
            net, output, output_det = m.build_fcn_segmenter(
                input_var, (None, 1, c.fcn_img_size, c.fcn_img_size), version)
            params = nn.layers.get_all_params(net['output'], trainable=True)

            lr = theano.shared(nn.utils.floatX(3e-3))
            loss = du.sorenson_dice(output, label_var)
            te_loss = du.sorenson_dice(output_det, label_var)
            te_acc = nn.objectives.binary_accuracy(output_det,
                                                   label_var).mean()
            updates = nn.updates.adam(loss, params, learning_rate=lr)
            train_fn = theano.function([input_var, label_var],
                                       loss,
                                       updates=updates)
            test_fn = theano.function([input_var, label_var], te_loss)
            pred_fn = theano.function([input_var], output_det)

            normstr = (str(normpct[0]) + str(normpct[1])) if normpct else 'MS'
            pfn = os.path.join(
                c.params_dir, 'fcn_v{}_p{}/fcn_v{}_p{}_f{}_{}.npz'.format(
                    version, normstr, version, normstr, i,
                    np.random.randint(100000)))
            hist = u.train_with_hdf5(
                data,
                num_epochs=num_epochs,
                train_fn=train_fn,
                test_fn=test_fn,
                max_per_epoch=-1,
                tr_transform=lambda x: du.segmenter_data_transform(
                    x, rotate=(-10, 50), normalize_pctwise=normpct),
                te_transform=lambda x: du.segmenter_data_transform(
                    x, rotate=None, normalize_pctwise=normpct),
                use_tqdm=False,
                last_layer=net['output'],
                save_last_params=(i == -1),
                save_params_to=pfn)
            if i != -1:
                stop_times.append(np.argmin(np.array(hist)[:, 1]) + 1)
                print 'stop time {}'.format(stop_times[-1])
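The fold loop above runs the five cross-validation folds first, records for each fold the epoch with the lowest validation loss, and then trains the final model (fold -1, for which save_last_params is set) for the mean of those stop times. Assuming hist holds one (train_loss, val_loss) pair per epoch, the epoch selection reduces to:

import numpy as np

# Hypothetical history: one (train_loss, val_loss) row per epoch.
hist = [(0.62, 0.58), (0.41, 0.44), (0.33, 0.39), (0.29, 0.41)]
stop_epoch = np.argmin(np.array(hist)[:, 1]) + 1   # -> 3, the 1-indexed best epoch

# Across the five folds, the final model trains for the average stop time.
stop_times = [3, 4, 3, 5, 4]
final_epochs = int(np.mean(stop_times))            # -> 3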
Example #4
    sca = c.scale

    ntimes = 6  # NCV+1
    np.random.seed(1234)
    input_var = T.tensor4('input')
    label_var = T.tensor4('label')
    net, output, output_det = build_fcn_segmenter(
        input_var, (None, 1, c.fcn_img_size, c.fcn_img_size), version)
    for l in nn.layers.get_all_layers(net['output']):
        print nn.layers.get_output_shape(l)
    params = nn.layers.get_all_params(net['output'], trainable=True)
    init0 = nn.layers.get_all_param_values(net['output'])

    lr = theano.shared(nn.utils.floatX(3e-3))
    loss = du.sorenson_dice(output, label_var, ss=ss)
    te_loss = du.sorenson_dice(output_det, label_var, ss=ss)
    te_acc = nn.objectives.binary_accuracy(output_det, label_var).mean()
    updates = nn.updates.adam(loss, params, learning_rate=lr)
    train_fn = theano.function([input_var, label_var], loss, updates=updates)
    test_fn = theano.function([input_var, label_var], te_loss)
    acc_fn = theano.function([input_var, label_var], te_acc)
    pred_fn = theano.function([input_var], output_det)

    batch_size = 16
    max_epoch = (0 if CV else num_epochs)
    for i in xrange(ntimes):
        if not CV and i != 5:
            continue
        if i == 5:
            num_epochs = max_epoch + 1
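Printing nn.layers.get_output_shape(l) for every layer returned by nn.layers.get_all_layers is a quick sanity check that the FCN keeps the expected spatial dimensions. A self-contained sketch of the same inspection on a toy Lasagne network (not the project's build_fcn_segmenter) looks like this:

import lasagne as nn
import theano.tensor as T

input_var = T.tensor4('input')
# Toy stand-in for the segmenter: input -> 3x3 conv -> 1-channel sigmoid output.
l_in = nn.layers.InputLayer((None, 1, 64, 64), input_var=input_var)
l_conv = nn.layers.Conv2DLayer(l_in, num_filters=8, filter_size=3, pad='same')
l_out = nn.layers.Conv2DLayer(l_conv, num_filters=1, filter_size=1,
                              nonlinearity=nn.nonlinearities.sigmoid)

for l in nn.layers.get_all_layers(l_out):
    print nn.layers.get_output_shape(l)
# (None, 1, 64, 64)
# (None, 8, 64, 64)
# (None, 1, 64, 64)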
Example #5
    shi = c.shift
    rot = c.rotation
    sca = c.scale

    ntimes = 6  # NCV+1
    np.random.seed(1234)
    input_var = T.tensor4('input')
    label_var = T.tensor4('label')
    net, output, output_det = build_fcn_segmenter(
        input_var, (None, 1, c.fcn_img_size, c.fcn_img_size), version)
    for l in nn.layers.get_all_layers(net['output']):
        print nn.layers.get_output_shape(l)
    params = nn.layers.get_all_params(net['output'], trainable=True)
    init0 = nn.layers.get_all_param_values(net['output'])

    lr = theano.shared(nn.utils.floatX(3e-3))
    loss = du.sorenson_dice(output, label_var, ss=ss)
    te_loss = du.sorenson_dice(output_det, label_var, ss=ss)
    te_acc = nn.objectives.binary_accuracy(output_det, label_var).mean()
    updates = nn.updates.adam(loss, params, learning_rate=lr)
    train_fn = theano.function([input_var, label_var], loss, updates=updates)
    test_fn = theano.function([input_var, label_var], te_loss)
    acc_fn = theano.function([input_var, label_var], te_acc)
    pred_fn = theano.function([input_var], output_det)

    batch_size = 16
    max_epoch = (0 if CV else num_epochs)
    for i in xrange(ntimes):
        if not CV and i != 5:
            continue
        if i == 5:
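Across these examples the segmenter returns both output and output_det; the latter is presumably the deterministic (inference-mode) pass, which is why it feeds the validation loss, accuracy, and prediction functions. A minimal, self-contained sketch of that compile-train/compile-eval pattern on a toy layer is given below; the toy model and data are illustrative only.

import numpy as np
import theano
import theano.tensor as T
import lasagne as nn

x = T.matrix('x')
y = T.matrix('y')
# Toy model standing in for the FCN: a dense layer behind dropout, so the
# deterministic and stochastic passes actually differ.
l_in = nn.layers.InputLayer((None, 4), input_var=x)
l_drop = nn.layers.DropoutLayer(l_in, p=0.2)
l_out = nn.layers.DenseLayer(l_drop, num_units=1, nonlinearity=None)

pred = nn.layers.get_output(l_out)                           # dropout active
pred_det = nn.layers.get_output(l_out, deterministic=True)   # dropout disabled
loss = nn.objectives.squared_error(pred, y).mean()
te_loss = nn.objectives.squared_error(pred_det, y).mean()
params = nn.layers.get_all_params(l_out, trainable=True)
updates = nn.updates.adam(loss, params, learning_rate=3e-3)

train_fn = theano.function([x, y], loss, updates=updates)    # one Adam step per call
test_fn = theano.function([x, y], te_loss)
pred_fn = theano.function([x], pred_det)

xb = np.random.rand(8, 4).astype(theano.config.floatX)
yb = np.random.rand(8, 1).astype(theano.config.floatX)
print train_fn(xb, yb), test_fn(xb, yb)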