Example #1
def main(cnf, weights_from):

    config = util.load_module(cnf).config

    if weights_from is None:
        weights_from = config.weights_file
    else:
        weights_from = str(weights_from)

    files = data.get_image_files(config.get('train_dir'))
    names = data.get_names(files)
    labels = data.get_labels(names).astype(np.float32)

    net = create_net(config)

    try:
        net.load_params_from(weights_from)
        print("loaded weights from {}".format(weights_from))
    except IOError:
        print("couldn't load weights starting from scratch")
    print("Shape of files: " + str(files.shape))
    print("Shape of labels: " + str(labels.shape))
    start = time.time()
    print("fitting ...")
    net.fit(files, labels)
    end = time.time()
    print("Time elapsed for fitting: " + str(end - start))
Example #2
def main(cnf, weights_from):

    config = util.load_module(cnf).config
    # print(config)
    if weights_from is None:
        weights_from = config.weights_file
    else:
        weights_from = str(weights_from)
    print(config.get('train_dir'))
    files = data.get_image_files(config.get('train_dir'))
    names = data.get_names(files)
    labels = data.get_labels(names).astype(np.float32)
    print("Checkpoint 5")
    net = create_net(config)
    print("Checkpoint 6")
    print(weights_from)
    # print(net.load_params_from())
    try:
        print("Checkpoint 7")
        net.load_params_from(weights_from)
        print("Checkpoint 8")
        print("loaded weights from {}".format(weights_from))
    except IOError:
        print("couldn't load weights starting from scratch")

    print("fitting ...")
    print(files)
    print(labels)
    net.fit(files, labels)
Example #3
def transform(cnf, n_iter, skip, test, train, weights_from, test_dir):

    config = util.load_module(cnf).config

    runs = {}
    if train:
        runs["train"] = config.get("train_dir")
    if test or test_dir:
        runs["test"] = test_dir or config.get("test_dir")

    net = nn.create_net(config)

    if weights_from is None:
        net.load_params_from(config.weights_file)
        print("loaded weights from {}".format(config.weights_file))
    else:
        weights_from = str(weights_from)
        net.load_params_from(weights_from)
        print("loaded weights from {}".format(weights_from))

    if n_iter > 1:
        tfs, color_vecs = tta.build_quasirandom_transforms(
            n_iter, skip=skip, color_sigma=config.cnf["sigma"], **config.cnf["aug_params"]
        )
    else:
        tfs, color_vecs = tta.build_quasirandom_transforms(
            n_iter, skip=skip, color_sigma=0.0, **data.no_augmentation_params
        )

    for run, directory in sorted(runs.items(), reverse=True):

        print("extracting features for files in {}".format(directory))
        tic = time.time()

        files = data.get_image_files(directory)

        Xs, Xs2 = None, None

        for i, (tf, color_vec) in enumerate(zip(tfs, color_vecs), start=1):

            print("{} transform iter {}".format(run, i))

            X = net.transform(files, transform=tf, color_vec=color_vec)
            if Xs is None:
                Xs = X
                Xs2 = X ** 2
            else:
                Xs += X
                Xs2 += X ** 2

            print("took {:6.1f} seconds".format(time.time() - tic))
            if i % 5 == 0 or n_iter < 5:
                std = np.sqrt((Xs2 - Xs ** 2 / i) / (i - 1))
                config.save_features(Xs / i, i, skip=skip, test=True if run == "test" else False)
                config.save_std(std, i, skip=skip, test=True if run == "test" else False)
                print("saved {} iterations".format(i))
Example #4
def transform(cnf=cnf,
              n_iter=n_iter,
              skip=skip,
              test=test,
              train=train,
              weights_from=weights_from,
              test_dir=test_dir):

    config = util.load_module(cnf).config

    config.cnf['batch_size_train'] = 128
    config.cnf['batch_size_test'] = 128

    runs = {}
    if train:
        runs['train'] = config.get('train_dir')
    if test or test_dir:
        runs['test'] = test_dir or config.get('test_dir')

    net = nn.create_net(config)

    if weights_from is None:
        net.load_params_from(config.weights_file)
        print("loaded weights from {}".format(config.weights_file))
    else:
        weights_from = str(weights_from)
        net.load_params_from(weights_from)
        print("loaded weights from {}".format(weights_from))

    if n_iter > 1:
        tfs, color_vecs = tta.build_quasirandom_transforms(
            n_iter,
            skip=skip,
            color_sigma=config.cnf['sigma'],
            **config.cnf['aug_params'])
    else:
        tfs, color_vecs = tta.build_quasirandom_transforms(
            n_iter, skip=skip, color_sigma=0.0, **data.no_augmentation_params)

    ret_val = []
    for run, directory in sorted(runs.items(), reverse=True):

        print("extracting features for files in {}".format(directory))
        tic = time.time()
        files = data.get_image_files(directory)

        Xs, Xs2 = None, None

        for i, (tf, color_vec) in enumerate(zip(tfs, color_vecs), start=1):

            print("{} transform iter {}".format(run, i))

            X = net.transform(files[:1000], transform=tf, color_vec=color_vec)
            ret_val.append(X)
    return ret_val, net
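Note that the default arguments here (cnf=cnf, n_iter=n_iter, ...) refer to module-level variables that must already exist, as in the script of Example #11, and that nothing is averaged or saved: the caller receives one feature matrix per transform, computed on the first 1000 files, plus the network. A hedged usage sketch; the config path is taken from Example #11 and the argument values are illustrative only:
# Hypothetical caller that averages the per-iteration features returned
# by this variant of transform(); argument values are illustrative only.
features_per_iter, net = transform(cnf='configs/c_512_5x5_32.py',
                                   n_iter=4, skip=0, test=False, train=True,
                                   weights_from=None, test_dir=None)
mean_features = sum(features_per_iter) / len(features_per_iter)
print(mean_features.shape)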
Example #5
def main(cnf, classes, weights_from, predict):

    config = util.load_module(cnf).config
    files = data.get_image_files(config.get('train_dir'))
    names = data.get_names(files)
    names = [int(x) for x in names]
    data.classes = int(classes)
    labels = data.get_labels(names)
    net = create_net(config)

    print(files.shape)
    print(labels.shape)
    if predict:
        if weights_from is None:
            weights_from = config.weights_file
        else:
            weights_from = str(weights_from)
        print(weights_from)
        try:
            net.load_params_from(weights_from)
            print("loaded weights from {}".format(weights_from))
        except IOError:
            print("couldn't load weights, starting from scratch")
    if not predict:
        print("fitting ...")
        net.fit(files, labels)
    else:
        print("predicting ...")
        test_files = data.get_image_files(config.get('test_dir'))
        y_pred = net.predict(test_files)
        y_pred = y_pred.transpose()
        print(y_pred)
        y_pred = np.clip(np.round(y_pred),
                         np.min(labels), np.max(labels)).astype(int)
        # print(y_pred)
        submission_filename = util.get_submission_filename()
        image_files = data.get_image_files(config.get('test_dir'))
        names = data.get_names(image_files)
        image_column = pd.Series(names, name='photo_id')
        level_column = pd.DataFrame(y_pred)  # name='labels'
        level_column = level_column.apply(lambda x: string_submit(x))
        predictions = pd.concat([image_column, level_column], axis=1)
        print("tail of predictions file")
        print(predictions.tail())
        predictions.columns = ['photo_id', 'labels']
        predictions.to_csv(submission_filename, index=False)
        print("saved predictions to {}".format(submission_filename))
Example #6
def main(cnf, classes, weights_from, predict):

    config = util.load_module(cnf).config
    files = data.get_image_files(config.get('train_dir'))
    names = data.get_names(files)
    names = [int(x) for x in names]
    data.classes = int(classes)
    labels = data.get_labels(names)
    net = create_net(config)

    print(files.shape)
    print(labels.shape)
    if predict:
        if weights_from is None:
            weights_from = config.weights_file
        else:
            weights_from = str(weights_from)
        print(weights_from)
        try:
            net.load_params_from(weights_from)
            print("loaded weights from {}".format(weights_from))
        except IOError:
            print("couldn't load weights starting from scratch")
    if not predict:
        print("fitting ...")
        net.fit(files, labels)
    else:
        print("predicting ...")
        test_files = data.get_image_files(config.get('test_dir'))
        y_pred = net.predict(test_files)
        y_pred = y_pred.transpose()
        print(y_pred)
        y_pred = np.clip(np.round(y_pred), np.min(labels),
                         np.max(labels)).astype(int)
        #print y_pred
        submission_filename = util.get_submission_filename()
        image_files = data.get_image_files(config.get('test_dir'))
        names = data.get_names(image_files)
        image_column = pd.Series(names, name='photo_id')
        level_column = pd.DataFrame(y_pred)  #name='labels')
        level_column = level_column.apply(lambda x: string_submit(x))
        predictions = pd.concat([image_column, level_column], axis=1)
        print("tail of predictions file")
        print(predictions.tail())
        predictions.columns = ['photo_id', 'labels']
        predictions.to_csv(submission_filename, index=False)
        print("saved predictions to {}".format(submission_filename))
Example #7
def main(cnf, weights_from, fold, exp_run_folder, train_retina):
    config = util.load_module(cnf).config
    config.cnf['fold'] = fold  # <-- used to change the directories for weights_best, weights_epoch and weights_final
    config.cnf['exp_run_folder'] = exp_run_folder
    protocol = data.settings['protocol']

    if train_retina != 'train_retina':
        folds = yaml.load(open('folds/' + protocol + '.yml'))
        f0, f1 = fold.split('x')
        train_list = folds['Fold_' + f0][int(f1) - 1]
        files = data.get_image_files(config.get('train_dir'), train_list)
    else:
        files = data.get_image_files(config.get('train_dir'))

    if weights_from is None:
        weights_from = config.weights_file
    else:
        weights_from = str(weights_from)

    names = data.get_names(files)
    labels = data.get_labels(names, label_file='folds/' + protocol +
                             '.csv').astype(np.int32)
    net = nn.create_net(config)

    try:
        net.load_params_from(weights_from)
        print("loaded weights from {}".format(weights_from))
    except IOError:
        print("couldn't load weights, starting from scratch")

    # Print layer information
    print("## Layer information")
    import nolearn
    layer_info = nolearn.lasagne.PrintLayerInfo()
    print(layer_info._get_greeting(net))
    layer_info, legend = layer_info._get_layer_info_conv(net)
    print(layer_info)
    print(legend)
    print("fitting ...")
    net.fit(files, labels)
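The fold argument is a string such as '1x2': f0 selects a 'Fold_<f0>' entry in folds/<protocol>.yml and f1 picks one of its two lists (Example #12 uses the other list as the test split). A hypothetical YAML layout consistent with those lookups; the image identifiers are invented for illustration:
# Hypothetical contents of folds/<protocol>.yml consistent with the
# lookups above; the image identifiers are invented.
import yaml

example_yaml = """
Fold_1:
  - [img_001, img_002, img_003]   # fold '1x1' -> index 0
  - [img_004, img_005, img_006]   # fold '1x2' -> index 1
Fold_2:
  - [img_007, img_008]
  - [img_009, img_010]
"""
folds = yaml.safe_load(example_yaml)
f0, f1 = '1x2'.split('x')
train_list = folds['Fold_' + f0][int(f1) - 1]
print(train_list)  # ['img_004', 'img_005', 'img_006']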
Example #8
def main(cnf, weights_from):

    config = util.load_module(cnf).config

    if weights_from is None:
        weights_from = config.weights_file
    else:
        weights_from = str(weights_from)

    files = data.get_image_files(config.get('train_dir'))
    names = data.get_names(files)
    labels = data.get_labels(names).astype(np.float32)

    net = create_net(config)

    try:
        net.load_params_from(weights_from)
        print("loaded weights from {}".format(weights_from))
    except IOError:
        print("couldn't load weights starting from scratch")

    print("fitting ...")
    net.fit(files, labels)
Example #9
def build(cnf, weights_from):

    config = util.load_module(cnf).config

    if weights_from is None:
        weights_from = config.weights_file
    else:
        weights_from = str(weights_from)

    files = data.get_image_files(config.get('train_dir'))
    names = data.get_names(files)
    labels = data.get_labels(names).astype(np.float32)

    net = create_net(config)

    try:
        net.load_params_from(weights_from)
        print("loaded weights from {}".format(weights_from))
    except IOError:
        print("couldn't load weights starting from scratch")

    print("fitting ...")
    # net.fit(files, labels)
    return net, files, names, labels
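Because build returns the network together with the training data, it is convenient for interactive sessions. A minimal hedged usage sketch; the config path is the one from Example #11 and may differ in practice:
# Hypothetical interactive use of build(); fitting is exactly the call
# that build() itself leaves commented out.
net, files, names, labels = build('configs/c_512_5x5_32.py', weights_from=None)
print(files.shape, labels.shape)
net.fit(files, labels)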
Example #10
def transform(cnf, n_iter, skip, test, train, weights_from, test_dir):

    config = util.load_module(cnf).config

    config.cnf['batch_size_train'] = 128
    config.cnf['batch_size_test'] = 128

    runs = {}
    if train:
        runs['train'] = config.get('train_dir')
    if test or test_dir:
        runs['test'] = test_dir or config.get('test_dir')

    net = nn.create_net(config)

    if weights_from is None:
        net.load_params_from(config.weights_file)
        print("loaded weights from {}".format(config.weights_file))
    else:
        weights_from = str(weights_from)
        net.load_params_from(weights_from)
        print("loaded weights from {}".format(weights_from))

    if n_iter > 1:
        tfs, color_vecs = tta.build_quasirandom_transforms(
            n_iter,
            skip=skip,
            color_sigma=config.cnf['sigma'],
            **config.cnf['aug_params'])
    else:
        tfs, color_vecs = tta.build_quasirandom_transforms(
            n_iter, skip=skip, color_sigma=0.0, **data.no_augmentation_params)

    for run, directory in sorted(runs.items(), reverse=True):

        print("extracting features for files in {}".format(directory))
        tic = time.time()

        files = data.get_image_files(directory)

        Xs, Xs2 = None, None

        for i, (tf, color_vec) in enumerate(zip(tfs, color_vecs), start=1):

            print("{} transform iter {}".format(run, i))

            X = net.transform(files, transform=tf, color_vec=color_vec)
            if Xs is None:
                Xs = X
                Xs2 = X**2
            else:
                Xs += X
                Xs2 += X**2

            print('took {:6.1f} seconds'.format(time.time() - tic))
            if i % 10 == 0 or n_iter < 5:
                std = np.sqrt((Xs2 - Xs**2 / i) / (i - 1))
                config.save_features(Xs / i,
                                     i,
                                     skip=skip,
                                     test=True if run == 'test' else False)
                config.save_std(std,
                                i,
                                skip=skip,
                                test=True if run == 'test' else False)
                print('saved {} iterations'.format(i))
Example #11
from __future__ import division
import time

import click
import numpy as np

import nn
import data_orig
import tta
import utils

cnf='configs/c_512_5x5_32.py'
config = utils.load_module(cnf).config
config.cnf['batch_size_train'] = 128

runs = {}
runs['train'] = config.get('train_dir')

net = nn.create_net(config)

weights_from = 'weights/c_512_5x5_32/weights_final.pkl'
net.load_params_from(weights_from)

tfs, color_vecs = tta.build_quasirandom_transforms(1, skip=0, color_sigma=0.0, **data_orig.no_augmentation_params)
for i, (tf, color_vec) in enumerate(zip(tfs, color_vecs), start=1):
    pass
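The loop body above is left as pass; filling it in with the same accumulation used in Example #10 would look roughly like this. It is a sketch only: it reuses net, tfs, color_vecs and runs from the script above and assumes data_orig exposes the same get_image_files helper as data in the other examples:
# Sketch of a possible loop body, mirroring Example #10.
files = data_orig.get_image_files(runs['train'])
Xs, Xs2 = None, None
for i, (tf, color_vec) in enumerate(zip(tfs, color_vecs), start=1):
    X = net.transform(files, transform=tf, color_vec=color_vec)
    if Xs is None:
        Xs, Xs2 = X, X ** 2
    else:
        Xs += X
        Xs2 += X ** 2
mean_features = Xs / i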



Example #12
def transform(cnf, exp_run_folder, n_iter, skip, test, train, weights_from,  test_dir, fold):

    config = util.load_module(cnf).config
    config.cnf['fold'] = fold                           # <-- used to change the directories for weights_best, weights_epoch and weights_final
    config.cnf['exp_run_folder'] = exp_run_folder

    runs = {}
    if train:
        runs['train'] = config.get('train_dir')
    if test or test_dir:
        runs['test'] = test_dir or config.get('test_dir')

    folds = yaml.load(open('folds/'+data.settings['protocol']+'.yml'))
    f0, f1 = fold.split('x')
    train_list = folds['Fold_' + f0][int(f1)-1]
    test_list  = folds['Fold_' + f0][0 if f1=='2' else 1]

    net = nn.create_net(config)

    if weights_from is None:
        net.load_params_from(config.weights_file)
        print("loaded weights from {}".format(config.weights_file))
    else:
        weights_from = str(weights_from)
        net.load_params_from(weights_from)
        print("loaded weights from {}".format(weights_from))

    if n_iter > 1:
        tfs, color_vecs = tta.build_quasirandom_transforms(
                n_iter, skip=skip, color_sigma=config.cnf['sigma'],
                **config.cnf['aug_params'])
    else:
        tfs, color_vecs = tta.build_quasirandom_transforms(
               n_iter, skip=skip, color_sigma=0.0,
                **data.no_augmentation_params)

    for run, directory in sorted(runs.items(), reverse=True):

        print("extracting features for files in {}".format(directory))
        tic = time.time()

        if run == 'train':
            files = data.get_image_files(directory, train_list)
        else:
            files = data.get_image_files(directory, test_list)

        Xs, Xs2 = None, None

        for i, (tf, color_vec) in enumerate(zip(tfs, color_vecs), start=1):

            print("{} transform iter {}".format(run, i))

            X = net.transform(files, transform=tf, color_vec=color_vec)
            if Xs is None:
                Xs = X
                Xs2 = X**2
            else:
                Xs += X
                Xs2 += X**2

            print('took {:6.1f} seconds'.format(time.time() - tic))
            if i % 5 == 0 or n_iter < 5:
                std = np.sqrt((Xs2 - Xs**2 / i) / (i - 1))
                config.save_features_fold(Xs / i, i, skip=skip, fold=fold,
                                     test=True if run == 'test' else False)
                #config.save_std_fold(std, i, skip=skip, fold=fold,
                #               test=True if run == 'test' else False)
                print('saved {} iterations'.format(i))