Example no. 1
                 decay=0.00001,
                 amsgrad=True)

# In[12]:

# Per-epoch metrics log; append=True so repeated runs extend the same CSV.
csv_logger = CSVLogger(csvlogger_name, append=True, separator=',')

# Save weights after every epoch (save_best_only=False keeps every
# checkpoint rather than only the best-val_loss one).
# NOTE(review): `period` is deprecated in newer Keras in favour of
# `save_freq` — confirm the installed Keras version still supports it.
checkpoint = ModelCheckpoint(checkpoint_filename,
                             monitor='val_loss',
                             verbose=1,
                             save_best_only=False,
                             period=1)

# Stop training once the monitored quantity has not improved (decreased,
# mode='min') for 5 consecutive epochs.
earlystopping = EarlyStopping(patience=5, mode='min')

#callbacks = [tensorboard, checkpoint]
callbacks = [checkpoint, csv_logger, earlystopping]

# In[13]:

# Mean-absolute-error training loss; MSE tracked as an additional metric.
model.compile(loss='mae', optimizer=optimizer, metrics=['mse'])

# In[14]:

# Train from Python generators with fixed step counts per epoch.
# NOTE(review): fit_generator() is deprecated in TF2-era Keras (model.fit
# accepts generators directly) — confirm the Keras version in use.
model.fit_generator(train_generator,
                    epochs=epochs,
                    steps_per_epoch=steps_per_epoch,
                    validation_data=validation_generator,
                    validation_steps=validation_steps,
                    callbacks=callbacks)
Example no. 2
    # MNIST single-task classification setup (the `if` header for this
    # branch is above this view — presumably a `test_type` check like the
    # `elif` below).
    root = './data'
    download = True  # download MNIST dataset or not

    # Convert PIL images to tensors, then normalize with mean 0.5 / std 1.0.
    trans = transforms.Compose(
        [transforms.ToTensor(), transforms.Normalize((0.5,), (1.0,))])
    train_set = dset.MNIST(root=root, train=True,
                           transform=trans, download=download)
    # NOTE(review): the test split does not pass download=download; it
    # relies on the train-set download above having fetched all files —
    # confirm this holds for the torchvision version in use.
    test_set = dset.MNIST(root=root, train=False, transform=trans)

    model = LeNet()

    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9)

    # Keras-style `compile` on the model wrapper (not stock PyTorch);
    # the 'top1'/'top2' metric semantics are defined by that wrapper.
    model.compile(optimizer, criterion, metrics=['top1', 'top2'])

elif test_type == 'multi tasks classification':

    # Synthetic categorical dataset with two target heads (D_out1, D_out2).
    train_set = CategoricalDatasetMultiTasks(N, D_in, D_out1, D_out2)
    # Test set sized at 25% of the training set.
    test_set = CategoricalDatasetMultiTasks(
        int(N * 0.25), D_in, D_out1, D_out2)

    model = MultiTasksClassification(D_in, H1, H2, D_out1, D_out2)

    optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9)

    # Keras-style `compile` on the wrapper: `multiCrossEntropy` is the loss
    # for the two named outputs; 'top1'/'top2' metric semantics come from
    # the wrapper, not stock PyTorch.
    model.compile(optimizer, multiCrossEntropy, metrics=[
                  'top1', 'top2'], multi_tasks=['output_a', 'output_b'])

train_loader = torch.utils.data.DataLoader(