Example #1
import torch
from segmentation_models_pytorch import FPN


def load_model_fpn(_model_weights, is_inference=False):
    print("Using weights {}".format(_model_weights))
    if _model_weights == "imagenet":
        # unet_encoder: backbone name assumed to be defined elsewhere in the source script
        model = FPN(unet_encoder,
                    encoder_weights="imagenet",
                    classes=4,
                    activation=None)
        if is_inference:
            model.eval()
        return model
    else:
        model = FPN(unet_encoder,
                    encoder_weights=None,
                    classes=4,
                    activation=None)
        if is_inference:
            model.eval()
    if _model_weights is not None:
        device = torch.device("cuda")
        model.to(device)
        state = torch.load(
            _model_weights)  # , map_location=lambda storage, loc: storage)
        model.load_state_dict(state["state_dict"])
        # new_state_dict = OrderedDict()
        #
        # for k, v in state['state_dict'].items():
        #     if k in model.state_dict():
        #         new_state_dict[k] = v
        # model = model.load_state_dict(new_state_dict)
    return model
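A minimal call sketch for the helper above, assuming unet_encoder is set to a valid backbone name and a CUDA device is available; the checkpoint path and input size are placeholders, and the sigmoid is applied manually because the model is built with activation=None.

import torch

model = load_model_fpn("fpn_fold0.pth", is_inference=True)   # placeholder checkpoint path
with torch.no_grad():
    images = torch.randn(1, 3, 256, 1600, device="cuda")     # placeholder input size
    logits = model(images)                                    # raw per-class scores, shape (1, 4, H, W)
    probs = torch.sigmoid(logits)                             # activation=None, so apply sigmoid here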
Example #2
import torch
from segmentation_models_pytorch import FPN


def fpn(backbone, pretrained_weights=None, classes=1, activation='sigmoid'):
    device = torch.device("cuda")
    model = FPN(encoder_name=backbone,
                encoder_weights=pretrained_weights,
                classes=classes,
                activation=activation)
    model.to(device)
    model.eval()  # TODO: double-check whether eval() should be called here

    return model
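A short usage sketch for the factory above; the backbone name and checkpoint path are placeholders, and the checkpoint loading follows the same pattern as Example #1.

import torch

model = fpn("se_resnext50_32x4d", pretrained_weights=None, classes=4, activation=None)
state = torch.load("checkpoint.pth",                          # placeholder path
                   map_location=lambda storage, loc: storage)
model.load_state_dict(state["state_dict"])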
Example #3
import torch
from segmentation_models_pytorch import FPN

device = torch.device("cuda")

# Encoder name is assumed from the checkpoint filename below.
model_senet = FPN(encoder_name="se_resnext50_32x4d",
                  encoder_weights=None,
                  classes=4,
                  activation=None)
model_senet.to(device)
model_senet.eval()
state = torch.load(
    '../input/senetmodels/senext50_30_epochs_high_threshold.pth',
    map_location=lambda storage, loc: storage)
model_senet.load_state_dict(state["state_dict"])

model_fpn91lb = FPN(encoder_name="se_resnext50_32x4d",
                    classes=4,
                    activation=None,
                    encoder_weights=None)
model_fpn91lb.to(device)
model_fpn91lb.eval()
#state = torch.load('../input/fpnseresnext/model_se_resnext50_32x4d_fold_0_epoch_7_dice_0.935771107673645.pth', map_location=lambda storage, loc: storage)
state = torch.load(
    '../input/fpnse50dice944/model_se_resnext50_32x4d_fold_0_epoch_26_dice_0.94392.pth',
    map_location=lambda storage, loc: storage)
model_fpn91lb.load_state_dict(state["state_dict"])

model_fpn91lb_pseudo = FPN(encoder_name="se_resnext50_32x4d",
                           classes=4,
                           activation=None,
                           encoder_weights=None)
model_fpn91lb_pseudo.to(device)
model_fpn91lb_pseudo.eval()
#state = torch.load('../input/fpnseresnext/model_se_resnext50_32x4d_fold_0_epoch_7_dice_0.935771107673645.pth', map_location=lambda storage, loc: storage)
state = torch.load(
    '../input/942-finetuned-on-pseudo-to9399/pseudo_fpn_se_resnext50_32x4d_fold_0_epoch_22_dice_0.944/pseudo_fpn_se_resnext50_32x4d_fold_0_epoch_22_dice_0.9446276426315308.pth',
    map_location=lambda storage, loc: storage)
model_fpn91lb_pseudo.load_state_dict(state["state_dict"])
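The three models above look like an inference ensemble; a rough sketch of averaging their sigmoid outputs over a batch follows (the batch shape and 0.5 threshold are assumptions, not taken from the original kernel).

with torch.no_grad():
    batch = torch.randn(2, 3, 256, 1600, device=device)      # placeholder batch
    probs = torch.stack([torch.sigmoid(m(batch))
                         for m in (model_senet, model_fpn91lb, model_fpn91lb_pseudo)]).mean(dim=0)
    masks = (probs > 0.5).float()                             # example threshold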