Example #1
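These snippets are excerpted main() functions and rely on project-level globals (the sub-models, batch sizes, num_epochs, model_saving_dir, the graph helper, train_phase, test_phase and save_model) defined elsewhere; the minimal PyTorch imports they assume are sketched below.

import timeit

import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader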
def main(init_epoch):
    # parameters of all sub-models are optimized jointly;
    # model_class is currently excluded from the optimizer
    parameter_list = list(
        model_CNN.parameters()) + list(model_avg_fc.parameters()) + list(
            model_reg.parameters())

    parameters_to_optimize = parameter_list
    # alternative tried: optim.Adadelta(parameters_to_optimize, lr=0.0001, weight_decay=0.5)
    optimizer = optim.Adam(parameters_to_optimize, lr=0.0001)
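    # losses: L1 + MSE for the regression output, plus five cross-entropy
    # criteria (presumably one per classification head) that train_phase and
    # test_phase consume through the action_criterions tuple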
    l1 = nn.L1Loss()
    l2 = nn.MSELoss()
    c1 = nn.CrossEntropyLoss()
    c2 = nn.CrossEntropyLoss()
    c3 = nn.CrossEntropyLoss()
    c4 = nn.CrossEntropyLoss()
    c5 = nn.CrossEntropyLoss()

    action_criterions = (c1, c2, c3, c4, c5)
    train_dataset = VideoDataset('train')
    test_dataset = VideoDataset('test')
    train_dataloader = DataLoader(train_dataset,
                                  batch_size=train_batch_size,
                                  shuffle=True)
    test_dataloader = DataLoader(test_dataset,
                                 batch_size=test_batch_size,
                                 shuffle=False)
    grph = graph()  # loss-curve tracker: update_graph() records losses, draw_and_save() writes the plot

    for epoch in range(init_epoch, num_epochs):

        start = timeit.default_timer()

        print(
            '-------------------------------------------------------------------------------------------------------'
        )

        tr_loss = train_phase(train_dataloader, optimizer, epoch, l1, l2,
                              action_criterions)
        ts_loss = test_phase(test_dataloader, epoch, l1, l2, action_criterions)
        print("average training loss: {}, average test loss: {}".format(
            tr_loss, ts_loss))
        grph.update_graph(tr_loss, ts_loss)
        # the loss plot and all four models are saved every epoch
        grph.draw_and_save()
        save_model(model_CNN, 'model_CNN', model_saving_dir, epoch)
        save_model(model_avg_fc, 'model_avg_fc', model_saving_dir, epoch)
        save_model(model_reg, 'model_reg', model_saving_dir, epoch)
        save_model(model_class, 'model_class', model_saving_dir, epoch)  # saved even though its parameters are not optimized
        stop = timeit.default_timer()
        print("time taken each epoch {} seconds".format(stop - start))
    grph.draw_and_save()
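The save_model helper used above is not shown; a minimal sketch, assuming it writes each sub-model's state_dict under model_saving_dir (note that Example #2 below passes the epoch and directory in the opposite order, so its helper's signature may differ):

import os
import torch

def save_model(model, model_name, saving_dir, epoch):
    # hypothetical helper: checkpoints the model's weights as
    # <saving_dir>/<model_name>_epoch_<epoch>.pth
    os.makedirs(saving_dir, exist_ok=True)
    path = os.path.join(saving_dir, '{}_epoch_{}.pth'.format(model_name, epoch))
    torch.save(model.state_dict(), path)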
Example #2
def main():

    parameters_2_optimize = list(model_lstm.parameters())

    optimizer = optim.Adam(parameters_2_optimize, lr=0.0001)

    criterion = nn.MSELoss()

    train_dataset = VideoDataset('train')
    test_dataset = VideoDataset('test')
    train_dataloader = DataLoader(train_dataset,
                                  batch_size=train_batch_size,
                                  shuffle=True)
    test_dataloader = DataLoader(test_dataset,
                                 batch_size=test_batch_size,
                                 shuffle=False)

    # actual training / testing loop
    for epoch in range(10):
        saving_dir = '...'
        if epoch == 0:  # save an initial checkpoint before training
            save_model(model_lstm, 'model_my_lstm', epoch, saving_dir)

        print(
            '-------------------------------------------------------------------------------------------------------'
        )

        train_phase(train_dataloader, optimizer, criterion, epoch)
        test_phase(test_dataloader)
        if (epoch + 1) % model_ckpt_interval == 0:  # checkpoint every model_ckpt_interval epochs
            save_model(model_lstm, 'model_my_lstm', epoch, saving_dir)

        # lr update: decay the learning rate by global_lr_gamma every global_lr_stepsize epochs
        if (epoch + 1) % global_lr_stepsize == 0:
            for param_group in optimizer.param_groups:
                param_group['lr'] *= global_lr_gamma
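The manual decay above can also be written with PyTorch's built-in scheduler; a minimal sketch of an equivalent loop, assuming the same global_lr_stepsize and global_lr_gamma values from this example:

from torch import optim

# StepLR multiplies every param group's lr by gamma once per step_size epochs
scheduler = optim.lr_scheduler.StepLR(optimizer,
                                      step_size=global_lr_stepsize,
                                      gamma=global_lr_gamma)

for epoch in range(10):
    train_phase(train_dataloader, optimizer, criterion, epoch)
    test_phase(test_dataloader)
    scheduler.step()  # applies the decay at the end of each epoch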
Example #3
def main():
    # evaluation-only run: no parameter list or optimizer is set up, only the
    # loss functions that test_phase expects
    l1 = nn.L1Loss()
    l2 = nn.MSELoss()
    c1 = nn.CrossEntropyLoss()
    c2 = nn.CrossEntropyLoss()
    c3 = nn.CrossEntropyLoss()
    c4 = nn.CrossEntropyLoss()
    c5 = nn.CrossEntropyLoss()

    action_criterions = (c1, c2, c3, c4, c5)
    # only the test split is needed for this evaluation-only run
    test_dataset = VideoDataset('test')
    test_dataloader = DataLoader(test_dataset,
                                 batch_size=test_batch_size,
                                 shuffle=False)

    for epoch in range(1):  # single evaluation pass
        saving_dir = "saved_models"

        print(
            '-------------------------------------------------------------------------------------------------------'
        )

        # training is skipped; only the test phase runs
        ts_loss = test_phase(test_dataloader, epoch, l1, l2, action_criterions)
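This evaluation-only main() presumably runs against checkpoints produced by Example #1; a hypothetical loading helper mirroring the save_model sketch above (the file-naming scheme is an assumption):

import os
import torch

def load_model(model, model_name, saving_dir, epoch):
    # hypothetical counterpart to save_model: restores a saved state_dict
    path = os.path.join(saving_dir, '{}_epoch_{}.pth'.format(model_name, epoch))
    model.load_state_dict(torch.load(path, map_location='cpu'))
    model.eval()  # switch to evaluation mode for the test-only run
    return model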