Example 1
def modify_classifier(model, model_name, num_classes):
    if 'alexnet' in model_name:
        num_features = model.classifier[6].in_features
        model.classifier[6] = nn.Linear(num_features, num_classes)
    elif 'vgg' in model_name:
        num_features = model.classifier[6].in_features
        model.classifier[6] = nn.Linear(num_features, num_classes)
    elif 'resnet' in model_name:
        num_features = model.fc.in_features
        model.fc = nn.Linear(num_features, num_classes)
    elif 'squeezenet' in model_name:
        model.classifier[1] = nn.Conv2d(512, num_classes, kernel_size=(1,1), stride=(1,1))
        model.num_classes = num_classes
    elif 'densenet' in model_name:
        num_features = model.classifier.in_features
        model.classifier = nn.Linear(num_features, num_classes)
    elif 'inception' in model_name:
        num_features = model.AuxLogits.fc.in_features
        model.AuxLogits.fc = nn.Linear(num_features, num_classes)
        # Handle the primary net
        num_features = model.fc.in_features
        model.fc = nn.Linear(num_features, num_classes)
    elif 'mobilenet' in model_name:
        raise NotImplementedError()
    elif 'resnext' in model_name:
        raise NotImplementedError()
    else:
        log.error('Invalid model name {} for modifying a classifier'.format(model_name))
        exit()
    return model
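
A minimal usage sketch of modify_classifier, assuming torchvision is available and that nn refers to torch.nn in the module above; 'resnet18' and the two-class setup are only illustrative choices, not something the snippet prescribes:

from torchvision import models

# Build a ResNet-18 and swap its 1000-way ImageNet head for a 2-class head.
model = models.resnet18()
model = modify_classifier(model, 'resnet18', num_classes=2)
print(model.fc)  # Linear(in_features=512, out_features=2, bias=True)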
Example 2
def build(model_config, checkpoint):
    if 'name' not in model_config:
        log.error('Specify a model name')
        exit()
    model_name = model_config['name']

    # build model
    if model_name in SUPERVISED_MODELS:
        log.infov('{} model is built'.format(model_name.upper()))
        model = build_supervised_model(model_name, model_config)
    elif model_name in SEMI_MODELS:
        log.infov('{} model is built'.format(model_name.upper()))
        model = build_semi_model(model_name, model_config)
    else:
        # report valid names without mutating the SUPERVISED_MODELS registry
        valid_models = list(SUPERVISED_MODELS) + list(SEMI_MODELS)
        log.error(
            'Enter valid model name among {}'.format(valid_models)
        )
        exit()

    # load model
    if checkpoint is not None:
        model.load_state_dict(checkpoint['model_state_dict'])
        log.infov('Model is built using the given checkpoint')
    else:
        log.infov('Model is built without checkpoint')

    # parallelize model
    if torch.cuda.device_count() > 1:
        model = torch.nn.DataParallel(model)
        log.warn("{} GPUs will be used.".format(torch.cuda.device_count()))

    return model
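
The registries SUPERVISED_MODELS and SEMI_MODELS and the two build_* helpers live elsewhere in the module, so any call shown here is only a sketch; the useful part is the expected shape of model_config:

# Hypothetical call - 'resnet' stands in for whatever keys the registries actually hold.
model_config = {'name': 'resnet', 'num_classes': 2}
model = build(model_config, checkpoint=None)  # no checkpoint: freshly initialized weights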
Example 3
    def __init__(self, data_dir, metadata_file, label_type, transform=None):
        self.data_dir = data_dir

        # load meta data
        metadata_path = os.path.join(self.data_dir, metadata_file)
        with open(metadata_path, 'r') as f:
            self.metadata = json.load(f)

        # initialize label map from the original label to a new label
        if label_type not in self.LABEL_TYPES:
            log.error('Specify right label type for TNC dataset - binary or multi')
            exit()
        self.label_map = self.LABEL_TYPES[label_type]

        # transformer
        self.transform = transform
Example 4
def build(data_config):
    if 'name' not in data_config:
        log.error('Specify a data name')
        exit()

    data_params = {
        'data_name': data_config['name'],
        'mode': data_config['mode'],
        'root_dir': data_config.get('root_dir', '/mnt/nfs/work1/ds4cg'),
        'batch_size': data_config.get('batch_size', 128),
        'num_workers': data_config.get('num_workers', 4),
        'label_type': data_config.get('label_type', 'binary')
    }

    dataloader = load(**data_params)
    return dataloader
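
Only 'name' and 'mode' are mandatory; every other key falls back to the default shown in the .get() calls. A hypothetical config that relies on those defaults:

# 'wildcam'/'train' are values that load() in Example 9 understands.
data_config = {'name': 'wildcam', 'mode': 'train', 'batch_size': 64}
dataloaders = build(data_config)  # root_dir, num_workers and label_type use the defaults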
Example 5
def build(train_config, optimizer, checkpoint):
    if 'lr_schedule' not in train_config:
        log.infov('No scheduler is specified')
        return None

    schedule_config = train_config['lr_schedule']
    scheduler_name = schedule_config.pop('name', 'step_lr')
    schedule_config['optimizer'] = optimizer

    if scheduler_name in SCHEDULERS:
        scheduler = SCHEDULERS[scheduler_name](**schedule_config)
    else:
        log.error('Specify valid scheduler name among {}'.format(
            SCHEDULERS.keys()))
        exit()

    if checkpoint is not None:
        scheduler.load_state_dict(checkpoint['scheduler_state_dict'])
    log.infov('{} scheduler is built'.format(scheduler_name.upper()))

    return scheduler
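
Because 'name' is popped and the remaining keys are forwarded as keyword arguments, the lr_schedule block must contain exactly the parameters of the chosen scheduler's constructor. A sketch, assuming SCHEDULERS maps 'step_lr' to torch.optim.lr_scheduler.StepLR:

train_config = {
    'lr_schedule': {'name': 'step_lr', 'step_size': 10, 'gamma': 0.1}
}
# Equivalent to StepLR(optimizer=optimizer, step_size=10, gamma=0.1)
scheduler = build(train_config, optimizer, checkpoint=None)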
Example 6
def build(train_config, model_params, checkpoint):
    if 'optimizer' not in train_config:
        log.error('Specify an optimizer')
        exit()

    optim_config = train_config['optimizer']
    optimizer_name = optim_config.pop('name', 'sgd')
    optim_config['params'] = model_params

    if optimizer_name in OPTIMIZERS:
        optimizer = OPTIMIZERS[optimizer_name](**optim_config)
    else:
        log.error('Specify valid optimizer name among {}'.format(
            OPTIMIZERS.keys()))
        exit()

    if checkpoint is not None:
        optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
    log.infov('{} optimizer is built'.format(optimizer_name.upper()))

    return optimizer
Example 7
    def __init__(self, mode, config_name, tag):
        self.mode = mode
        self.tag = util.generate_tag(tag)

        # assign configurations
        config = util.load_config(config_name)
        self.model_config = config['model']
        self.train_config = config['train']
        self.eval_config = config['eval']
        self.data_config = config['data']

        # misc information
        self.model_name = self.model_config['name']
        self.num_classes = self.model_config['num_classes']

        # setup a directory to store checkpoints or evaluation results
        util.setup(self.mode, self.model_name, self.tag)

        # store data name to data config depending on which mode we are on
        if self.mode == 'train':
            data_name = self.train_config['data']
        elif self.mode == 'eval':
            data_name = self.eval_config['data']
        else:
            log.error('Specify right mode - train, eval')
            exit()

        self.data_config['name'] = data_name
        self.data_config['mode'] = mode

        # determine which device to use - cpu or gpu
        device = "cuda" if torch.cuda.is_available() else "cpu"
        self.device = torch.device(device)

        if device == "cpu":
            log.warn("GPU is not available. Please check the configuration.")
        else:
            log.warn("GPU is available.")

        self.writer = SummaryWriter()
Example 8
    def __init__(self, data_dir, metadata_file, label_type, transform=None, mode='train'):
        # train / eval
        if mode not in self.MODES:
          log.error('Specify right mode for WILDCAM dataset - train, eval'); exit()
        self.mode = mode
        self.data_dir = data_dir

        if self.mode != 'eval':
          # load meta data
          metadata_path = os.path.join(self.data_dir, metadata_file)
          with open(metadata_path, 'r') as f:
            self.metadata = json.load(f)

          # initialize label map from the original label to a new label
          if label_type not in self.LABEL_TYPES:
            log.error('Specify right label type for WILDCAM dataset - binary'); exit()
          self.label_map = self.LABEL_TYPES[label_type]
        else:
          self.data_dir = os.path.join(data_dir, 'test')
          self.metadata = fnmatch.filter(os.listdir(self.data_dir), '*.jpg')

        # transformers
        self.transform = transform
Example 9
def load(mode, data_name, root_dir, batch_size, num_workers, label_type):
    if data_name == 'nacti':
      if mode == 'eval':
        log.error('Evaluation dataset for {} is not available'.format(data_name)); exit()
      dataloaders = load_nacti(mode, data_name, root_dir, batch_size, num_workers, label_type)
    elif data_name == 'wildcam':
      dataloaders = load_wildcam(mode, data_name, root_dir, batch_size, num_workers, label_type)
    elif data_name == 'tnc':
      dataloaders = load_tnc(mode, data_name, root_dir, batch_size, num_workers, label_type)
    else:
      log.error('Specify right data name for {} - nacti, tnc, wildcam'.format(mode)); exit()

    if mode == 'train':
      dataloader = {'train': dataloaders[0], 'val': dataloaders[1]}
    elif mode == 'eval':
      dataloader = {'eval': dataloaders}
    else:
      log.error('Specify right mode - train, eval'); exit()

    return dataloader
Example 10
    def __init__(self, gamma=0, alpha=None, size_average=True):
        super(BinaryClassFocalLoss, self).__init__(gamma, alpha, size_average)
        if not (alpha is None or isinstance(alpha, (float, int))):
            log.error('Wrong alpha is given')
            exit()
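
The parent class (defined elsewhere in the repo) supplies the actual forward pass; this subclass only restricts alpha to a single scalar, the natural parameterization for the binary case, where focal loss down-weights easy examples by a (1 - p_t)**gamma factor on top of alpha-weighted cross-entropy. A hypothetical usage, with names from the training loop:

criterion = BinaryClassFocalLoss(gamma=2, alpha=0.25)  # alpha must be a float/int or None
loss = criterion(logits, targets)  # logits/targets supplied by the training loop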