Example No. 1
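Trains an SSD-style object detector on Pascal VOC. The cosine cycle length t_max equals the number of batches per epoch, so the scheduler is presumably stepped once per batch. The examples in this listing are bare main() functions collected from different projects and omit their imports; as a minimal sketch, this one assumes roughly the following (everything beyond the standard torch imports is project-specific, and the names are taken on trust from the snippet):

import math
import torch
from torch import optim

# Project-local names assumed by the snippet: Loop, Logger, VOCDataset,
# VOCDataLoader, SSD, BinaryCrossEntropyLoss, ssd_loss, make_grid, t, to_np,
# TRAIN_JSON, TRAIN_JPEG, DEVICE. Note that CosineAnnealingLR here takes
# t_max (and cycle_mult/eta_min in later examples), so it is the project's
# own scheduler rather than torch.optim.lr_scheduler.CosineAnnealingLR.
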
def main():
    bs = 64
    n_anchors = 4
    dataset = VOCDataset(TRAIN_JSON, TRAIN_JPEG, device=DEVICE)
    loader = VOCDataLoader(dataset, batch_size=bs, num_workers=0)
    # plotter = VOCPlotter(id2cat=dataset.id2cat, figsize=(12, 10))
    #
    # for images, (boxes, classes) in iter(loader):
    #     with plotter:
    #         plotter.plot_boxes(*to_np(images, boxes, classes))
    #         break  # a single batch to verify everything works

    n_classes = len(dataset.id2cat)
    cycle_len = math.ceil(len(dataset)/bs)
    model = SSD(n_classes=n_classes, bias=-3.)
    optimizer = optim.Adam(model.parameters(), lr=1e-2)
    scheduler = CosineAnnealingLR(optimizer, t_max=cycle_len)
    loop = Loop(model, optimizer, scheduler, device=DEVICE)

    anchors, grid_sizes = [
        x.to(DEVICE) for x in (
            t(make_grid(n_anchors), requires_grad=False).float(),
            t([1/n_anchors], requires_grad=False).unsqueeze(1))]

    bce_loss = BinaryCrossEntropyLoss(n_classes)
    loss_fn = lambda x, y: ssd_loss(x, y, anchors, grid_sizes, bce_loss, n_classes)

    loop.run(
        train_data=loader,
        epochs=100,
        loss_fn=loss_fn,
        callbacks=[Logger()]
    )
Example No. 2
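Pre-trains an LSTM language model on the unsupervised IMDB splits using BPTT sequence iterators; the 400/1150 embedding and hidden sizes match the AWD-LSTM setup. After training, the path of the best checkpoint is read back from the Checkpoint callback and written to a file named 'best'.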
def main():
    datasets = create_or_restore(IMDB)
    train_data = datasets['train_unsup']
    test_data = datasets['test_unsup']

    bs = 50
    bptt = 70

    train = SequenceIterator(to_sequence(train_data), bptt, bs)
    valid = SequenceIterator(to_sequence(test_data), bptt, bs)

    lm = LanguageModel(
        vocab_sz=train_data.vocab.size,
        embed_sz=400, n_hidden=1150)

    dev = device(force_cpu=True) if args.use_cpu else device(args.cuda)  # args is parsed elsewhere (not shown)
    print('Selected device: %s' % dev)

    opt = optim.Adam(
        lm.parameters(), lr=1e-3, weight_decay=1e-7, betas=(0.8, 0.99))
    cycle_length = len(train_data) // bs
    sched = CosineAnnealingLR(opt, t_max=cycle_length, cycle_mult=1, eta_min=1e-5)
    loop = Loop(lm, opt, sched, device=dev)

    loop.run(train_data=train, valid_data=valid,
             loss_fn=F.cross_entropy,
             metrics=[accuracy],
             callbacks=default_callbacks())

    best_model = loop['Checkpoint'].best_model
    print('Best model: %s' % best_model)
    with open('best', 'w') as file:
        file.write(best_model + '\n')
Example No. 3
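A character-level language model: tokenize=list splits the text into individual characters. Training runs with early stopping, then the best checkpoint is reloaded and text is generated from the seed 'Deep song'.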
def main():
    bs, bptt = 32, 16
    field = Field(lower=True, tokenize=list)
    dataset = TextDataset(field, keep_new_lines=True, min_freq=5)
    factory = lambda seq: SequenceIterator(seq, bptt, bs)
    dataset.build(train=TRAIN_DIR, valid=VALID_DIR, iterator_factory=factory)

    model = RNN(dataset.vocab_size,
                n_factors=50,
                batch_size=bs,  # must match the SequenceIterator batch size
                n_hidden=256,
                n_recurrent=3,
                architecture=nn.LSTM)
    optimizer = optim.Adam(model.parameters(), lr=1e-2)
    cycle_length = dataset['train'].total_iters
    scheduler = CosineAnnealingLR(optimizer,
                                  t_max=cycle_length / 2,
                                  eta_min=1e-5)
    callbacks = [EarlyStopping(patience=50), Logger(), History(), Checkpoint()]
    loop = Loop(Stepper(model, optimizer, scheduler, F.nll_loss))

    loop.run(train_data=dataset['train'],
             valid_data=dataset['valid'],
             callbacks=callbacks,
             epochs=500)

    best_model = loop['Checkpoint'].best_model
    model.load_state_dict(torch.load(best_model))
    text = generate_text(model, field, seed='Deep song')
    pretty_print(text)
Example No. 4
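An image classifier on MNIST-shaped data (the dummy input below is 1x28x28 and the ResNet has 10 outputs): trains for three epochs with light augmentation, visualizes predictions on a random validation batch, and finally exports the best model to CoreML via ONNX.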
def main():
    batch_size = 10000
    num_workers = cpu_count()
    data_transforms = {
        'train':
        transforms.Compose([
            transforms.RandomRotation(4),
            transforms.RandomAffine(degrees=0, translate=(0.05, 0.05)),
            transforms.ToTensor(),
            transforms.Normalize(*STATS)
        ]),
        'valid':
        transforms.Compose(
            [transforms.ToTensor(),
             transforms.Normalize(*STATS)])
    }
    datasets = load_dataset(data_transforms,
                            batch_size=batch_size,
                            num_workers=num_workers)

    n_samples = len(datasets['train']['dataset'])  # dataset size, not the number of loader batches
    n_batches = math.ceil(n_samples / batch_size)

    model = ResNet(10)
    opt = optim.Adam(model.parameters(), lr=1e-2)
    sched = CosineAnnealingLR(opt, T_max=n_batches / 4, eta_min=1e-5)
    loop = Loop(model, opt, sched, device=DEVICE)

    loop.run(train_data=datasets['train']['loader'],
             valid_data=datasets['valid']['loader'],
             loss_fn=F.cross_entropy,
             metrics=[accuracy],
             callbacks=default_callbacks(),
             epochs=3)

    best_model = loop['Checkpoint'].best_model
    weights = torch.load(best_model)
    model.load_state_dict(weights)
    x, y = random_sample(datasets['valid']['dataset'])
    y_pred = model(x.to(DEVICE))
    valid_acc = accuracy(y_pred, y.to(DEVICE))
    title = f'Validation accuracy: {valid_acc:2.2%}'
    titles = [str(x) for x in to_np(y_pred.argmax(dim=1))]

    show_predictions(images=to_np(x.permute(0, 2, 3, 1)),  # NCHW -> NHWC for plotting
                     suptitle=title,
                     titles=titles)

    dummy_input = torch.randn(16, 1, 28, 28, requires_grad=True).cuda()
    torch.onnx.export(model, dummy_input, 'digits.onnx', export_params=True)  # file name must match convert() below
    core_ml_model = convert('digits.onnx')
    core_ml_model.save('digits.mlmodel')
    print('CoreML model was saved onto disk')
Example No. 5
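A minimal CIFAR-10 run: labels are derived from folder names, there is no validation set, and the only callback is a Logger.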
def main():
    path = '/home/ck/data/cifar10/train'

    dataset = LabelledImagesDataset(labels_from='folders',
                                    root=path,
                                    batch_size=2048,
                                    one_hot=False,
                                    transforms=[to_xy, as_tensor])

    train_data = iter(dataset)
    n = len(train_data)

    model = SimpleResNet([10, 20, 40, 80, 160], 10).cuda()
    optimizer = optim.Adam(model.parameters(), lr=1e-2)
    schedule = CosineAnnealingLR(optimizer, t_max=n / 2, eta_min=1e-5)
    loop = Loop(model, optimizer, schedule)

    loop.run(train_data=train_data, callbacks=[Logger()])
Example No. 6
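The same character-level RNN recipe as Example No. 3, with the hyperparameters lifted into named variables and the model's batch size tied to the iterator's bs.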
def main():
    bs = 64
    bptt = 8
    n_factors = 50
    n_hidden = 512
    n_recurrent = 2
    n_epochs = 100

    field = Field(lower=True, tokenize=list)
    dataset = TextDataset(field, min_freq=5)
    factory = lambda seq: SequenceIterator(seq, bptt, bs)
    dataset.build(train=TRAIN_DIR, valid=VALID_DIR, iterator_factory=factory)

    model = RNN(dataset.vocab_size,
                n_factors=n_factors,
                batch_size=bs,
                n_hidden=n_hidden,
                n_recurrent=n_recurrent,
                architecture=nn.LSTM)
    optimizer = optim.Adam(model.parameters(), lr=1e-2)
    cycle_length = dataset['train'].total_iters
    scheduler = CosineAnnealingLR(optimizer, t_max=cycle_length, eta_min=1e-5)
    loop = Loop(model, optimizer, scheduler, device=DEVICE)

    loop.run(train_data=dataset['train'],
             valid_data=dataset['valid'],
             epochs=n_epochs,
             callbacks=[
                 EarlyStopping(patience=50),
                 Logger(),
                 History(),
                 Checkpoint()])

    best_model = loop['Checkpoint'].best_model
    model.load_state_dict(torch.load(best_model))
    text = generate_text(model, field, seed='For thos')
    pretty_print(text)
Example No. 7
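A stripped-down variant of Example No. 4: the training and validation steps are commented out, leaving only the model construction and the CoreML export path to be exercised.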
def main():
    data_transforms = {
        'train':
        transforms.Compose([
            transforms.RandomRotation(4),
            transforms.RandomAffine(degrees=0, translate=(0.05, 0.05)),
            transforms.ToTensor(),
            transforms.Normalize(*STATS)
        ]),
        'valid':
        transforms.Compose(
            [transforms.ToTensor(),
             transforms.Normalize(*STATS)])
    }

    n_epochs = 3
    batch_size = 4096
    num_workers = 0  # cpu_count()

    datasets = load_dataset(data_transforms, batch_size, num_workers)
    n_samples = len(datasets['train']['dataset'])  # dataset size, not the number of loader batches
    n_batches = math.ceil(n_samples / batch_size)

    model = ResNet(10)
    opt = optim.Adam(model.parameters(), lr=1e-2)
    sched = CosineAnnealingLR(opt, T_max=n_batches / 4, eta_min=1e-5)
    loop = Loop(model, opt, sched, device=DEVICE)

    # loop.run(train_data=datasets['train']['loader'],
    #          valid_data=datasets['valid']['loader'],
    #          loss_fn=F.cross_entropy,
    #          metrics=[accuracy],
    #          callbacks=default_callbacks(),
    #          epochs=n_epochs)

    # file_name = loop['Checkpoint'].best_model
    dataset = datasets['valid']['loader']
    # validate_model(model, file_name, dataset, DEVICE)
    export_to_core_ml(model)
Example No. 8
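CIFAR-10 with a custom ResNet: datasets and loaders are built per split, training runs for up to 150 epochs under early stopping, and the best checkpoint is restored to visualize predictions on a small batch of validation images.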
def main():
    root = Path.home() / 'data' / 'cifar10'

    data_transforms = {
        'train':
        transforms.Compose([
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize(mean=MEAN, std=STD)
        ]),
        'valid':
        transforms.Compose(
            [transforms.ToTensor(),
             transforms.Normalize(mean=MEAN, std=STD)])
    }

    datasets, loaders, dataset_sizes = {}, {}, {}
    for name in ('train', 'valid'):
        dataset = ImageFolder(root / name, data_transforms[name])
        training = name == 'train'
        datasets[name] = dataset
        loaders[name] = DataLoader(dataset=dataset,
                                   batch_size=256,
                                   shuffle=training,
                                   num_workers=cpu_count())
        dataset_sizes[name] = len(dataset)

    n = len(datasets['train'])

    model = CustomResNet()
    optimizer = optim.Adam(model.parameters(), lr=1e-2, weight_decay=1e-5)
    schedule = CosineAnnealingLR(optimizer,
                                 t_max=n,
                                 eta_min=1e-5,
                                 cycle_mult=2)
    loop = Loop(model, optimizer, schedule, device=DEVICE)

    callbacks = [
        History(),
        CSVLogger(),
        Logger(),
        EarlyStopping(patience=50),
        Checkpoint()
    ]

    loop.run(train_data=loaders['train'],
             valid_data=loaders['valid'],
             callbacks=callbacks,
             loss_fn=F.cross_entropy,
             metrics=[accuracy],
             epochs=150)

    dataset = datasets['valid']
    loader = DataLoader(dataset=dataset, batch_size=8, shuffle=True)
    x, y = next(iter(loader))
    state = torch.load(loop['Checkpoint'].best_model)
    model.load_state_dict(state)
    predictions = model(x.cuda())
    labels = predictions.argmax(dim=1)
    class_names = [dataset.classes[i] for i in labels]
    imshow(utils.make_grid(x), title=class_names)
Example No. 9
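Unlike the training examples above, this Loop is a pygame game loop (core.loop.Loop): the script creates the window, wires a Menu into the object list, binds Escape and Q to stop the loop, and runs it.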
from settings import *  # expected to provide pygame plus the name, width, and height constants

objects = []
providers = []  # global draw-event handlers; such handlers are not bound to objects
events_handlers = []  # pygame event handlers

if __name__ == '__main__':
    from core.loop import Loop
    from core.key_bindings import KeyBindings
    from objects.interface.menu import Menu
    from core.game import Game

    pygame.display.init()
    pygame.display.set_caption(name)

    screen = pygame.display.set_mode((width, height))

    pygame.font.init()
    game = Game(objects)

    objects.append(Menu(screen, game))

    loop = Loop(screen, objects)
    game.loop = loop

    KeyBindings.register(pygame.K_ESCAPE, loop.stop)
    KeyBindings.register(pygame.K_q, loop.stop)

    loop.run()