# Exemplo n.º 1
# 0
def main():
    """Script entry point: build, test, and visualize the class hierarchy.

    Ensures WordNet is available, parses command-line arguments, then
    runs the generate / test / visualize pipeline on them.
    """
    maybe_install_wordnet()

    # Parse CLI arguments once and fan them out to each pipeline stage.
    args = get_parser().parse_args()

    generate_hierarchy(**vars(args))
    test_hierarchy(args)
    generate_hierarchy_vis(args)
def NBDTLoss(cfg, criterion):
    """Build an NBDT (Neural-Backed Decision Tree) criterion from config.

    Looks up the loss class named by ``cfg.MODEL.ROI_RELATION_HEAD.LOSS.NBDT.TYPE``
    in the ``nbdtloss`` module and instantiates it, wrapping the given base
    ``criterion`` with tree supervision on the VG150 dataset.

    Args:
        cfg: project config node; only ``MODEL.ROI_RELATION_HEAD`` keys are read.
        criterion: base loss to be wrapped with tree supervision.

    Returns:
        The instantiated NBDT loss object.
    """
    maybe_install_wordnet()

    nbdt_cfg = cfg.MODEL.ROI_RELATION_HEAD.LOSS.NBDT
    loss_cls = getattr(nbdtloss, nbdt_cfg.TYPE)
    # NOTE(review): sample_nums drops the first entry of REL_SAMPLES —
    # presumably the background/no-relation class; confirm against the config.
    return loss_cls(
        dataset='VG150',
        criterion=criterion,
        sample_nums=cfg.MODEL.ROI_RELATION_HEAD.REL_SAMPLES[1:],
        path_graph=nbdt_cfg.PATH_GRAPH,
        path_wnids=nbdt_cfg.PATH_WNIDS,
        tree_supervision_weight=nbdt_cfg.FACTOR)
import torch.optim as optim
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
from nbdt import data, analysis, loss, models

import torchvision
import torchvision.transforms as transforms

import os
import argparse
import numpy as np

from nbdt.utils import (progress_bar, generate_fname, generate_kwargs, Colors,
                        maybe_install_wordnet)

# Make sure the WordNet corpus is present before anything touches nbdt.
maybe_install_wordnet()

# All dataset choices: the two CIFAR variants plus every ImageNet/custom
# dataset registered in nbdt.data.
datasets = ('CIFAR10', 'CIFAR100') + data.imagenet.names + data.custom.names

parser = argparse.ArgumentParser(description='PyTorch CIFAR Training')
parser.add_argument(
    '--batch-size', default=512, type=int,
    help='Batch size used for training')
parser.add_argument(
    '--epochs', '-e', default=200, type=int,
    help='By default, lr schedule is scaled accordingly')
parser.add_argument('--dataset', default='CIFAR10', choices=datasets)
parser.add_argument('--arch',