Example #1
def main(args):

    # load config file 
    with open(args.config_dir) as f:
        config = json.load(f)

    # set seeds
    np.random.seed(config['seed'])
    torch.manual_seed(config['seed'])

    # set up cache/output directories and logger
    _set_dirs(config, args.name)
    logger = _set_logger(config, args.name)

    # get embeddings 
    embed_shape, train_embeddings, valid_embeddings = _get_data(config, logger, args.name)

    if config['train']:
        train(config, args.name, logger, train_embeddings, valid_embeddings, embed_shape)
    elif config['test']:
        # _test(config, args.name, logger)
        pass
    else:
        logger.error("Neither train nor test mode activated.")
        raise ValueError("Neither train nor test mode activated.")

    logger.info('Done!')
Example #2
def lista():
    template_conf_path = "template.yaml"
    with open(template_conf_path, 'r') as f:
        conf = yaml.safe_load(f)
    conf['subnet_name'] = 'lista'
    
    Ts = [1]
    Cs = [128]
    Ks = [0]
    budget = 128 * 4 * 32
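    # fixed sample budget: mb_size is set below so that T * C * mb_size stays equal to budget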
    grid = [(T, C, K) for T in Ts for C in Cs for K in Ks if K < C]
    for T, C, K in grid:
        conf['T'] = T
        conf['n_c'] = C
        conf['e_rank'] = K
        conf['s_rank'] = C // 4
        conf['mb_size'] = budget // (T * C)  # integer minibatch size
        conf['path_tmp'] = 'tmp/lista'
        run_model.train(conf)
        psnr, bl_psnr = run_model.eval_te(conf)
        with open('notes/log_lista.txt', 'a') as f:
            f.write('T: %03d C: %03d K: %03d PSNR: %.2f (%.2f)\n' % \
                    (T, C, K, psnr, bl_psnr))
Example #3
def lcod():
    template_conf_path = "template.yaml"
    with open(template_conf_path, 'r') as f:
        conf = yaml.safe_load(f)
    
    Ts = [8]
    Cs = [32]
    Ks = [28]
    budget = 128 * 4 * 16
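    # sweep the (T, C, K) grid, keeping only configurations where the rank K is strictly smaller than the channel count C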
    grid = [(T, C, K) for T in Ts for C in Cs for K in Ks if K < C]
    for T, C, K in grid:
        print("T: %d C: %03d K: %02d" % (T, C, K))
        time.sleep(2)
        conf['T'] = T
        conf['n_c'] = C
        conf['e_rank'] = K
        conf['mb_size'] = budget // (T * C)  # integer minibatch size
        conf['path_tmp'] = 'tmp/%03d_%03d_%03d' % (T, C, K)
        run_model.train(conf, False)
        psnr, bl_psnr = run_model.eval_te(conf)
        with open('notes/log.txt', 'a') as f:
            f.write('T: %03d C: %03d K: %03d PSNR: %.2f (%.2f)\n' % \
                    (T, C, K, psnr, bl_psnr))
Example #4
    conf['subnet_name'] = 'lista'
    
    Ts = [1]
    Cs = [128]
    Ks = [0]
    budget = 128 * 4 * 32
    grid = [(T, C, K) for T in Ts for C in Cs for K in Ks if K < C]
    for T, C, K in grid:
        conf['T'] = T
        conf['n_c'] = C
        conf['e_rank'] = K
        conf['s_rank'] = C // 4
        conf['mb_size'] = budget // (T * C)  # integer minibatch size
        conf['path_tmp'] = 'tmp/lista'
        run_model.train(conf)
        psnr, bl_psnr = run_model.eval_te(conf)
        with open('notes/log_lista.txt', 'a') as f:
            f.write('T: %03d C: %03d K: %03d PSNR: %.2f (%.2f)\n' % \
                    (T, C, K, psnr, bl_psnr))


if __name__ == '__main__':
    #lcod()
    #lista()
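    # train and evaluate a single saved configuration rather than sweeping a grid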
    template_conf_path = "notes/lcod_1_512_28.yaml"
    with open(template_conf_path, 'r') as f:
        conf = yaml.safe_load(f)
    run_model.train(conf)
    run_model.eval_sam(conf)
Example #5
import torch as ts            # assumed alias for torch, matching ts.load / ts.save below
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

import run_model

model_name = "model_nn1"
model_file_name = model_name + ".ptm"

n_epochs = 10
l_rate = 0.0000001


class MnistNN1(nn.Module):
    def __init__(self, input_dim, inter_dim, labels_dim):
        super(MnistNN1, self).__init__()

        self.lin1 = nn.Linear(input_dim, inter_dim)

        self.lin2 = nn.Linear(inter_dim, labels_dim)

    def forward(self, img):
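        # flatten the input image into a single row of 784 pixels (one image at a time)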
        out = self.lin1(img.view(1, -1))
        out = self.lin2(out)
        return F.log_softmax(out, dim=1)


# two-layer classifier: 784 flattened MNIST pixels -> 800 hidden units -> 10 digit classes
model = MnistNN1(784, 800, 10)

model.load_state_dict(ts.load(model_file_name))

run_model.train(model, l_rate, n_epochs, optim.Adam)

run_model.test(model_name, model, n_epochs)

ts.save(model.state_dict(), model_file_name)
Example #6
# Imports here
import torch
import argparse
import run_model

# Get data from command line
parser = argparse.ArgumentParser()
parser.add_argument('path', action="store", type=str, help='Name of directory where pictures are located, example = flowers')
parser.add_argument('--epochs', type=int, default=2, help='Set epochs for nn model [0-3], default 2')
parser.add_argument('--learning_rate', type=float, default=0.003, help='Set learning rate, default 0.003')
parser.add_argument('--arch', type=str, default='densenet121', help='Set the torchvision model used for prediction, default = densenet121')
parser.add_argument('--hidden_units', type=int, default=256, help='Set hidden units, default 256')

args = parser.parse_args()
path = args.path.strip('/')
epochs = args.epochs
learning_rate = args.learning_rate
arch = args.arch.strip('"')
hidden_units = args.hidden_units

#load data
trainloader, validloader, testloader, class_to_idx = run_model.load_data(path)
# train model
run_model.train(trainloader, validloader, class_to_idx, epochs, learning_rate, arch, hidden_units)
# test model
run_model.test(testloader)
print('Done training')

#and we are done
Example #7
    # Important to utilize CPUs even when training on multi-GPU instances, as the CPUs can be a bottleneck when feeding data to the GPUs
    CPUS = 16

    save_class_names(train_folder, project_name)

    model = create_model(input_size, n_categories)

    # Initialize a dictionary with the layer name and corresponding learning rate ratio

    # Initialize optimizer and compile the model
    model.compile(optimizer='nadam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    if GPUS:
        # Initialize a multi-GPU model via keras.utils.multi_gpu_model, overriding save/load
        # so the weights can still be saved and read back as a single-GPU (serial) model
        model = ModelMGPU(model)

    # Create data generators
    train_datagen, validation_datagen, train_generator, validation_generator, nTrain, nVal = create_generators(
        augmentation_strength,
        target_size,
        batch_size,
        train_folder,
        validation_folder,
        preprocessing_function=None)

    train(model, train_datagen, validation_datagen, train_generator,
          validation_generator, epochs, batch_size, project_name, nTrain, nVal,
          CPUS)
Example #8
from argparse import ArgumentParser

from apscheduler.schedulers.blocking import BlockingScheduler  # the add_job/start usage below matches APScheduler

import run_model
import config
from storage import pg_migration, PGStorage  # assuming PGStorage is exported by the storage package
from parse_new_data import parse_new_data

if __name__ == "__main__":
    print('starting')
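    # Postgres-backed storage handle (DSN from config) shared by the commands below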
    storage_pg = PGStorage(config.DATABASE_DSN)

    parser = ArgumentParser()
    parser.add_argument('command',
                        metavar='<command>',
                        help="'train' or 'migrate' or 'parse' or 'run-bot'")
    args = parser.parse_args()
    if args.command == "train":
        run_model.train(storage_pg)
        exit(0)
    if args.command == "migrate":
        pg_migration.create_articles_table(storage_pg)
        pg_migration.create_train_data_table(storage_pg)
        exit(0)
    elif args.command == "parse":
        parse_new_data(storage_pg)
        scheduler = BlockingScheduler()
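        # re-run parse_new_data every 30 minutes; scheduler.start() blocks until interrupted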
        scheduler.add_job(lambda: parse_new_data(storage_pg),
                          'interval',
                          minutes=30)
        scheduler.start()
        exit(0)
    elif args.command == "run-bot":
        run_bot()