Example 1
def test_model(model_name, model, test_loader=validation_set_loader):
    # load the trained weights saved under RESULTS_PATH/<model_name>/<model_name>.pt
    model.load_state_dict(torch.load(str(RESULTS_PATH) + "/" + str(model_name) + "/" + str(model_name) + ".pt"))

    if USE_CUDA and cuda_available:
        model = model.cuda()

    model.eval()

    preds = []
    gts = []

    # debug: counter used to stop after a few batches when DEBUG is set
    i = 0
    
    for batch in test_loader:
        x = Variable(batch['image'])

        if USE_CUDA and cuda_available:
            x = x.cuda()
            pred = model(x).data.cpu().numpy().copy()
        else:
            pred = model(x).data.numpy().copy()

        gt = batch['label'].numpy().copy()
        preds.append(pred)
        gts.append(gt)

        # debug: stop after a few batches when DEBUG is enabled
        if DEBUG:
            if i == 2:
                break
            i += 1

    return np.concatenate(preds), np.concatenate(gts)
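The concatenated arrays returned by test_model can be scored outside the function. A minimal sketch, assuming the model emits one score per class and the labels are integer class indices; the helper name below is illustrative, not part of the original code:

import numpy as np

def accuracy_from_outputs(preds, gts):
    # top-1 accuracy: highest-scoring class per sample compared against the labels
    predicted_classes = np.argmax(preds, axis=1)
    return float((predicted_classes == gts).mean())

# preds, gts = test_model("resnet152", resnet152_model)
# print("validation accuracy:", accuracy_from_outputs(preds, gts))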
Example 2
def generate_test_da(model_name, model, test_loader=test_set_loader):
    model.load_state_dict(torch.load(str(RESULTS_PATH) + "/" + str(model_name) + "/" + str(model_name) + ".pt"))

    if USE_CUDA and cuda_available:
        model = model.cuda()

    model.eval()

    preds = []
    gts = []
    
    for batch in test_loader:
        x = Variable(batch['image'])
        if USE_CUDA and cuda_available:
            x = x.cuda()
            pred = model(x).data.cpu().numpy().copy()
        else:
            pred = model(x).data.numpy().copy()

        gt = batch['label'].numpy().copy()
        preds.append(pred)
        gts.append(gt)

    output_test = ""
    for i in range(len(preds)):
        for j in range(len(preds[i])):
            for p in range(len(preds[i][j])):
                output_test += ","+str(preds[i][j][p])
            output_test+="\n"

    with open(model_name + "_output.csv", "w") as f:
        f.write(output_test)
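Because every row written above starts with a comma, the first CSV column is empty. A minimal sketch for reading the file back, assuming the file name this snippet would produce for resnet152:

import numpy as np

# drop the empty leading column produced by the ","+value concatenation above
rows = np.genfromtxt("resnet152_output.csv", delimiter=",")[:, 1:]
print(rows.shape)  # (number of samples, number of model outputs)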
Example 3
def generate_test(model_name, model):
    model.load_state_dict(torch.load(str(RESULTS_PATH) + "/" + str(model_name) + "/" + str(model_name) + ".pt"))

    with open(TEST_PATH, "r") as f:
        test_list = f.read().split("\n")
    test_list.remove('')

    output_test = ""

    for img in test_list:
        image_path = "images/"+str(img)
        im = Image.open(image_path).convert("RGB")
        im = transform(im)

        if USE_CUDA and cuda_available:
            model = model.cuda()
        model.eval()

        x = Variable(im.unsqueeze(0))

        if USE_CUDA and cuda_available:
            x = x.cuda()
            pred = model(x).data.cpu().numpy().copy()
        else:
            pred = model(x).data.numpy().copy()


        if not REGRESSION:
            # classification: take the arg-max score and map it onto the 16 classes
            idx_max_pred = np.argmax(pred)
            idx_classes = idx_max_pred % 16

            output_test += str(img) + "," + str(idx_classes) + "\n"
        else:
            # regression: write every raw output value for this image
            output_test += str(img)
            for i in pred[0]:
                output_test += "," + str(i)
            output_test += "\n"
    
    with open(model_name + "_output.csv", "w") as f:
        f.write(output_test)
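These snippets rely on a transform defined elsewhere in the script. A minimal illustrative torchvision pipeline is sketched below; the resize size and the lack of normalisation are assumptions, not the original configuration:

from torchvision import transforms

# illustrative preprocessing only: the real transform is defined outside these snippets
transform = transforms.Compose([
    transforms.Resize((224, 224)),  # assumed input size
    transforms.ToTensor(),          # PIL image -> float tensor in [0, 1]
])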
Example 4
model = resnet152_model

if args.inp:
    print("input: ", args.inp)

    image_path = args.inp
    im = Image.open(image_path).convert("RGB")
    im = transform_test(im)

    print(
        str(RESULTS_PATH) + "/" + str(model_name) + "/" + str(model_name) +
        ".pt")

    model.load_state_dict(
        torch.load(
            str(RESULTS_PATH) + "/" + str(model_name) + "/" + str(model_name) +
            ".pt"))

    if USE_CUDA and cuda_available:
        model = model.cuda()
    model.eval()

    x = Variable(im.unsqueeze(0))

    if USE_CUDA and cuda_available:
        x = x.cuda()
        pred = model(x).data.cpu().numpy().copy()
    else:
        pred = model(x).data.numpy().copy()

    print(pred)
Example 5
resnet152_model = resnet.resnet152(pretrained=False, **classes)
train_model_iter("resnet152", resnet152_model)

model_name="resnet152"
model=resnet152_model

if args.inp:
    print ("input: ", args.inp)

    image_path = args.inp
    im = Image.open(image_path).convert("RGB")
    im = transform(im)

    print(str(RESULTS_PATH) + "/" + str(model_name) + "/" + str(model_name) + ".pt")

    model.load_state_dict(torch.load(str(RESULTS_PATH) + "/" + str(model_name) + "/" + str(model_name) + ".pt"))

    if USE_CUDA and cuda_available:
        model = model.cuda()
    model.eval()

    x = Variable(im.unsqueeze(0))

    if USE_CUDA and cuda_available:
        x = x.cuda()
        pred = model(x).data.cpu().numpy().copy()
    else:
        pred = model(x).data.numpy().copy()

    print(pred)
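All of the examples use the pre-0.4 PyTorch Variable/.data API. On current PyTorch the same inference step is usually wrapped in torch.no_grad(); a sketch reusing the globals assumed above (model, im, USE_CUDA, cuda_available):

import torch

model.eval()
with torch.no_grad():        # no autograd bookkeeping is needed for inference
    x = im.unsqueeze(0)      # add the batch dimension
    if USE_CUDA and cuda_available:
        x = x.cuda()
    pred = model(x).cpu().numpy()
print(pred)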