Example #1
        # optional LR scheduler, only enabled for the transformer model type
        scheduler = None
        if CONFIG['type'] == "transformer":
            scheduler = optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99)

        # record best result
        BEST_RESULT = {
            "h_acc": 0,
            "epoch": 0
        }

        for epoch in range(CONFIG['start_epoch'], CONFIG['end_epoch']):

            # train
            train_metrics = model_epoch(loss_name="trainval", epoch=epoch, model=model, neg_sample=CONFIG['neg_sample'],
                                        data_loader=train_loader, concepts=concepts,
                                        optimizer=optimizer, writer=writer, debug=CONFIG['debug'])

            # accuracy in both settings: g=False logs as 'train', g=True as 'train_g'
            for g in [False, True]:
                record_name = 'train_g' if g else 'train'
                train_class, train_acc = utils.cal_acc(train_metrics, g)
                writer.add_scalar(record_name + '_acc', train_acc * 100, epoch)

                if CONFIG['skewness']:
                    train_skew = utils.skewness(train_metrics, g)
                    writer.add_scalar(record_name + '_skewness', train_skew, epoch)

            ######################################################################################
            # test
            record = {tn: {'acc': 0.0, 'class': None} for tn in STATE['split_list'][1:]}
            record.update({tn + '_g': {'acc': 0.0, 'class': None} for tn in STATE['split_list'][1:]})
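The scheduler used above is the stock torch.optim.lr_scheduler.ExponentialLR. Below is a minimal, self-contained sketch of how it decays the learning rate; the toy parameter, learning rate, and epoch count are placeholders, not values from this repository:

    import torch
    import torch.optim as optim

    # toy parameter so the optimizer has something to schedule
    param = torch.nn.Parameter(torch.zeros(1))
    optimizer = optim.SGD([param], lr=0.1)

    # multiply the learning rate by gamma each time scheduler.step() is called
    scheduler = optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99)

    for epoch in range(3):
        # ... one epoch of training (optimizer.step() per batch) would go here ...
        optimizer.step()
        scheduler.step()                      # lr <- lr * 0.99
        print(epoch, optimizer.param_groups[0]['lr'])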
Example #2
        scheduler = None
        if CONFIG['type'] == "transformer":
            scheduler = optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99)

        # record best result
        BEST_RESULT = {"h_acc": 0, "epoch": 0}

        for epoch in range(CONFIG['start_epoch'], CONFIG['end_epoch']):

            # train
            train_metrics = model_epoch(loss_name="trainval",
                                        epoch=epoch,
                                        model=model,
                                        type=CONFIG['type'],
                                        neg_sample=CONFIG['neg_sample'],
                                        data_loader=train_loader,
                                        concepts=concepts,
                                        use_smooth=False,
                                        margin=0.1,
                                        optimizer=optimizer,
                                        writer=writer,
                                        debug=CONFIG['debug'])

            for g in [False, True]:
                record_name = 'train_g' if g else 'train'
                train_class, train_acc = utils.cal_acc(train_metrics, g)
                writer.add_scalar(record_name + '_acc', train_acc * 100, epoch)

                if CONFIG['skewness']:
                    train_skew = utils.skewness(train_metrics, g)
                    writer.add_scalar(record_name + '_skewness', train_skew,
                                      epoch)
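The accuracy logging in these examples goes through torch.utils.tensorboard. A minimal sketch of the same add_scalar pattern, with a placeholder log directory and dummy metric values:

    from torch.utils.tensorboard import SummaryWriter

    writer = SummaryWriter('runs/example')    # placeholder log directory
    for epoch in range(3):
        train_acc = 0.5 + 0.1 * epoch         # dummy metric
        # tag, scalar value, global step
        writer.add_scalar('train_acc', train_acc * 100, epoch)
    writer.close()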
Example #3
            # test
            # record = {i: {'acc': 0.0, 'class': None} for i in ['conv', 'general']}
            test_metrics = {
                'total': deque(),
                'correct': deque(),
                'total_g': deque(),
                'correct_g': deque()
            }

            for tn in STATE['split_list'][1:]:

                test_metric = model_epoch(loss_name=tn,
                                          epoch=epoch,
                                          model=model,
                                          type=CONFIG['type'],
                                          data_loader=test_loaders[tn],
                                          concepts=concepts,
                                          optimizer=None,
                                          writer=None,
                                          debug=CONFIG['debug'])

                for k in test_metrics.keys():
                    v = np.asarray(test_metric[k]).argmax(axis=1)
                    test_metrics[k].extend(v)

            print(DATASET + ' skewness:')

            for g in [False, True]:
                test_skew = utils.skewness(test_metrics, g)
                print(test_skew)
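The test loop above accumulates per-batch results into deques and reduces score matrices with argmax. The same pattern in isolation, using random placeholder scores instead of model outputs:

    from collections import deque

    import numpy as np

    preds = deque()
    for _ in range(3):                        # pretend: one iteration per test batch
        scores = np.random.rand(8, 5)         # 8 samples x 5 candidate classes
        preds.extend(scores.argmax(axis=1))   # keep only the predicted class indices
    print(len(preds), list(preds)[:5])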
Example #4
    optimizer = optim.SGD(params, L_RATE, momentum=CONFIG['momentum'])

    scheduler = optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.9)
    # scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.5)
    # scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[1, 8], gamma=0.1)

    for epoch in range(CONFIG['start_epoch'], CONFIG['end_epoch']):

        # training
        train_metrics = model_epoch(loss_name="trainval",
                                    mode="train",
                                    epoch=epoch,
                                    model=model,
                                    k=CONFIG['k'],
                                    d=CONFIG['d'],
                                    data_loader=train_loader,
                                    concepts=concepts,
                                    optimizer=optimizer,
                                    writer=writer)

        # decay the learning rate once per epoch; since PyTorch 1.1 the scheduler
        # should be stepped after the optimizer updates, not before them
        scheduler.step()

        # checkpoint the weights after every epoch
        torch.save(model.state_dict(),
                   PJ(SAVE_PATH, 'epoch' + str(epoch) + '.pkl'))

        train_class, train_acc = cal_acc(train_metrics)
        writer.add_scalar('trainval_acc', train_acc * 100, epoch)

        ######################################################################################

        # test
        record = {
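Example #4 also checkpoints the model once per epoch via torch.save on the state_dict. A minimal save/load round trip under the same convention; the model and path below are placeholders standing in for the repository's model and PJ(SAVE_PATH, ...):

    import os

    import torch
    import torch.nn as nn

    model = nn.Linear(4, 2)                          # placeholder model
    path = os.path.join('.', 'epoch0.pkl')           # stands in for PJ(SAVE_PATH, ...)

    torch.save(model.state_dict(), path)             # persist only the weights
    model.load_state_dict(torch.load(path))          # restore into a matching module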
Example #5
            'params': model.transform.parameters() if CONFIG['freeze'] else model.parameters()
        }]

        if CONFIG['optim'] == 'SGD':
            optimizer = optim.SGD(params, np.float64(CONFIG['l_rate']), momentum=CONFIG['momentum'])

        elif CONFIG['optim'] == 'Adam':
            optimizer = optim.Adam(params, np.float64(CONFIG['l_rate']))

        # one SummaryWriter per validation run, opened once instead of being
        # re-created on every epoch
        writer = SummaryWriter(PJ(SAVE_PATH, 'val' + str(val_times)))

        for epoch in range(1, CONFIG['end_epoch']):

            # training
            train_metrics = model_epoch(loss_name="train", epoch=epoch, model=model,
                                        data_loader=train_loader, concepts=concepts,
                                        optimizer=optimizer, writer=writer)

            for g in [False, True]:
                record_name = 'train_g' if g else 'train'
                train_class, train_acc = utils.cal_acc(train_metrics, g)
                writer.add_scalar(record_name + '_acc', train_acc * 100, epoch)

            ######################################################################################

            # val
            record = {'val': {'acc': 0.0, 'class': None}}
            record.update({'val_g': {'acc': 0.0, 'class': None}})

            val_metric = model_epoch(loss_name="val", epoch=epoch, model=model,
                                     data_loader=val_loader, concepts=concepts,
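Example #5 picks the optimizer from CONFIG['optim']. The same selection logic in a self-contained form; the parameter group and hyperparameter values below are placeholders:

    import torch
    import torch.optim as optim

    params = [torch.nn.Parameter(torch.zeros(3))]    # placeholder parameter group
    config = {'optim': 'SGD', 'l_rate': 1e-3, 'momentum': 0.9}

    if config['optim'] == 'SGD':
        optimizer = optim.SGD(params, lr=config['l_rate'], momentum=config['momentum'])
    elif config['optim'] == 'Adam':
        optimizer = optim.Adam(params, lr=config['l_rate'])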