Example #1
import model      # project module: CNN builders and apply_classification_loss
import read_data  # project module: SVHN / CIFAR-10 dataset generators


def main():
    # FOR CIFAR-10
    # dataset_generators = {
    #     'train': read_data.cifar10_dataset_generator('train', 512),
    #     'test': read_data.cifar10_dataset_generator('test', -1)
    # }

    # FOR SVHN
    dataset_generators = {
        'train': read_data.svhn_dataset_generator('train', 512),
        'test': read_data.svhn_dataset_generator('test', 512)
    }

    print("map")
    model_dict = model.apply_classification_loss(model.cnn_map)
    train_model(model_dict, dataset_generators, epoch_n=20, print_every=100)
    print("stride 44")
    model_dict = model.apply_classification_loss(model.cnn_modification_s44)
    train_model(model_dict, dataset_generators, epoch_n=20, print_every=100)
    print("stride 24")
    model_dict = model.apply_classification_loss(model.cnn_modification_s24)
    train_model(model_dict, dataset_generators, epoch_n=20, print_every=100)
    print("stride11")
    model_dict = model.apply_classification_loss(model.cnn_modification_s11)
    train_model(model_dict, dataset_generators, epoch_n=20, print_every=100)
    print("filternum=12")
    model_dict = model.apply_classification_loss(model.cnn_modification_f12)
    train_model(model_dict, dataset_generators, epoch_n=20, print_every=100)
    print("filternum=24")
    model_dict = model.apply_classification_loss(model.cnn_modification_f24)
    train_model(model_dict, dataset_generators, epoch_n=20, print_every=100)
    print("filternum=48")
    model_dict = model.apply_classification_loss(model.cnn_modification_f48)
    train_model(model_dict, dataset_generators, epoch_n=20, print_every=100)
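
The helpers used above (model.apply_classification_loss, train_model, and the read_data generators) live elsewhere in the project and are not shown here. As a rough sketch of the pattern these calls assume, not the project's actual implementation, a TensorFlow 1.x apply_classification_loss might build the graph and hand back a dict of the tensors the training loop needs (every dict key except 'var_list', which Example #5 uses, is hypothetical):

import tensorflow as tf

def apply_classification_loss_sketch(cnn_builder):
    graph = tf.Graph()
    with graph.as_default():
        x_ = tf.placeholder(tf.float32, [None, 32, 32, 3])  # SVHN / CIFAR-10 images
        y_ = tf.placeholder(tf.int32, [None])                # integer class labels
        logits = cnn_builder(x_)                             # e.g. model.cnn_map
        loss = tf.reduce_mean(
            tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y_, logits=logits))
        predictions = tf.cast(tf.argmax(logits, axis=1), tf.int32)
        accuracy = tf.reduce_mean(tf.cast(tf.equal(predictions, y_), tf.float32))
        train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)
        var_list = tf.global_variables()
    return {'graph': graph, 'inputs': (x_, y_), 'train_op': train_op,
            'loss': loss, 'accuracy': accuracy, 'var_list': var_list}
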
Example #2
import model
import read_data


def main():
    # FOR SVHN
    dataset_generators = {
        'train': read_data.svhn_dataset_generator('train', 512),
        'test': read_data.svhn_dataset_generator('test', 512)
    }

    model_dict = model.apply_classification_loss(model.cnn_map)
    new_train_model(model_dict,
                    dataset_generators,
                    epoch_n=50,
                    print_every=30,
                    save_model=True)

    cnn_expanded_dict = model.apply_classification_loss(model.cnn_expanded)
    new_train_model(cnn_expanded_dict,
                    dataset_generators,
                    epoch_n=50,
                    print_every=30,
                    load_model=True)
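
new_train_model and its save_model / load_model flags are not shown above. A minimal sketch, assuming a TensorFlow 1.x checkpoint flow: saving the trained cnn_map weights and later restoring the shared subset when training cnn_expanded would map onto tf.train.Saver roughly like this (the helper name and checkpoint path are hypothetical):

import tensorflow as tf

def maybe_save_or_restore(sess, var_list=None, save_model=False, load_model=False,
                          ckpt_path='checkpoints/model.ckpt'):
    # Restricting the Saver to var_list lets an expanded model restore only the
    # variables it shares with the previously saved baseline.
    saver = tf.train.Saver(var_list=var_list)
    if load_model:
        saver.restore(sess, ckpt_path)  # initialize shared layers from the checkpoint
    if save_model:
        saver.save(sess, ckpt_path)     # write/refresh the checkpoint after training
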
Example #3
import model
import read_data


def main():
    dataset_generators = {
        'train': read_data.cifar10_dataset_generator('train', 128),
        'test': read_data.cifar10_dataset_generator('test', -1)
    }

    cnn_expanded_dict = model.apply_classification_loss(model.cnn_expanded)

    new_train_model(cnn_expanded_dict, dataset_generators, epoch_n=100,
                    print_every=10, save_model=False)

    new_train_model(cnn_expanded_dict, dataset_generators, epoch_n=100,
                    print_every=10, load_model=True)
Example #4
import model
import read_data


def main():
    # FOR SVHN
    dataset_generators = {
        'train': read_data.svhn_dataset_generator('train', 256),
        'test': read_data.svhn_dataset_generator('test', 256)
    }

    model_dict = model.apply_classification_loss(model.cnn_map)
    visualize(model_dict,
              dataset_generators,
              epoch_n=20,
              print_every=100,
              batch_size=256)
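
The visualize helper is likewise not shown; its arguments suggest it trains the model and then renders something about it. Purely as one plausible illustration (not the project's code), first-layer convolution kernels fetched from the session could be drawn with matplotlib like this, assuming a (height, width, 3, n_filters) kernel array:

import numpy as np
import matplotlib.pyplot as plt

def plot_first_layer_filters(kernels, path='filters.png', cols=8):
    # kernels: numpy array of shape (h, w, 3, n), e.g. the result of sess.run()
    # on the first conv layer's weight variable for RGB inputs such as SVHN.
    n = kernels.shape[-1]
    rows = int(np.ceil(n / float(cols)))
    fig, axes = plt.subplots(rows, cols, figsize=(cols, rows))
    for i, ax in enumerate(np.ravel(axes)):
        ax.axis('off')
        if i < n:
            k = kernels[..., i]
            k = (k - k.min()) / (k.max() - k.min() + 1e-8)  # rescale to [0, 1]
            ax.imshow(k)
    fig.savefig(path)
    plt.close(fig)
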
Example #5
import model
import read_data


def test_saving():

    dataset_generators = {
        'train': read_data.svhn_dataset_generator('train', 512),
        'test': read_data.svhn_dataset_generator('test', 512)
    }

    model_dict = model.apply_classification_loss(model.cnn_modified)
    new_train_model(model_dict,
                    dataset_generators,
                    epoch_n=100,
                    print_every=10,
                    variable_list=model_dict['var_list'],
                    save_model=True,
                    load_model=True)
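
Example #5 is the only snippet that passes variable_list explicitly, which suggests the save/restore machinery is limited to those variables. In TensorFlow 1.x such a list is typically gathered by variable scope; a short sketch (the scope name is hypothetical):

import tensorflow as tf

# Collect only the variables of the layers shared between the saved baseline and
# cnn_modified, so the checkpoint stays loadable across the two architectures.
shared_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='shared_conv')
saver = tf.train.Saver(var_list=shared_vars)
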
Example #6
import model
import read_data


def main():
    # FOR CIFAR-10
    # dataset_generators = {
    #     'train': read_data.cifar10_dataset_generator('train', 512),
    #     'test': read_data.cifar10_dataset_generator('test', -1)
    # }

    # FOR SVHN
    dataset_generators = {
        "train": read_data.svhn_dataset_generator("train", 512),
        "test": read_data.svhn_dataset_generator("test", 512),
    }

    model_dict = model.apply_classification_loss(model.cnn_map)
    train_model(model_dict, dataset_generators, epoch_n=50, print_every=10)
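
None of the snippets above invoke their entry function themselves; in the original scripts each would presumably be run through the usual guard:

if __name__ == '__main__':
    main()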