Example #1 (score: 0)
        # NOTE(review): fragment of a training inner loop — the enclosing
        # for-loop / function starts before this view and the `if` on the
        # last line below is truncated; code left byte-identical.

        # Reverse the feature list (presumably so the decoder consumes the
        # deepest feature maps first — TODO confirm against `deconv`).
        feats = feats[::-1]
        msk = deconv(feats)
        # Upsample the predicted mask by 4x. `functional.upsample` is the
        # deprecated pre-0.4 API (modern torch uses F.interpolate).
        msk = functional.upsample(msk, scale_factor=4)
        # Sigmoid of the raw logits; `prior` is computed but not used in the
        # visible lines — possibly consumed later in the (unseen) loop body.
        prior = functional.sigmoid(msk)
        # Accumulate loss on the raw logits (criterion presumably expects
        # logits, e.g. BCEWithLogits — TODO confirm where `criterion` is built).
        loss += criterion(msk, lbl)

        # Clear gradients on both sub-networks before backprop.
        deconv.zero_grad()
        feature.zero_grad()

        loss.backward()

        # Update both parameter groups with their own optimizers.
        optimizer_feature.step()
        optimizer_deconv.step()

        # visualize
        # Un-normalize the first 3 channels of the input batch for TensorBoard.
        image = make_image_grid(inputs.data[:, :3], mean, std)
        writer.add_image('Image', torchvision.utils.make_grid(image), ib)
        msk = functional.sigmoid(msk)
        mask1 = msk.data  # msk data — the segmentation output.
        # Replicate the single-channel mask to 3 channels for image display.
        mask1 = mask1.repeat(1, 3, 1, 1)
        # Pseudo-"accuracy": exp(-loss); a Variable in this legacy torch API.
        acc = math.e**(0 - loss)
        writer.add_image('Image2', torchvision.utils.make_grid(mask1), ib)
        print('loss: %.4f,  acc %.4f, (epoch: %d, step: %d)' %
              (loss.data[0], acc, it, ib))
        # `.data[0]` scalar access is the pre-0.4 idiom (now `.item()`).
        writer.add_scalar('M_global', loss.data[0], istep)
        writer.add_scalar('acc', acc.data[0], istep)
        istep += 1

        # Free per-step tensors explicitly to keep GPU/host memory flat.
        del inputs, msk, lbl, loss, feats, mask1, image, acc
        gc.collect()
        # NOTE(review): truncated here — the periodic-action body (every 30
        # steps, likely checkpointing) is outside this view.
        if ib % 30 == 0:
Example #2 (score: 0)
# NOTE(review): Python 2 script fragment (print statement on the last line);
# `res101`, `seg`, `loader`, `criterion`, `optimizer_seg`, `writer`, `mean`,
# `std`, `Colorize`, `make_*_grid` and `Variable` are defined outside this view.
# Adam over the backbone's parameters; the segmentation head has its own
# optimizer (`optimizer_seg`, created elsewhere).
optimizer_feat = torch.optim.Adam(res101.parameters(), lr=1e-4)

# Train for 10 epochs over the loader.
for t in range(10):
    for i, (img, label) in enumerate(loader):
        img = img.cuda()
        # `label` arrives as a 1-tuple/list; take the first element —
        # presumably the dataset returns (target,) — TODO confirm.
        label = label[0].cuda()
        # Pre-0.4 torch: tensors must be wrapped in Variable for autograd.
        label = Variable(label)
        input = Variable(img)

        # Forward: backbone features, then segmentation head.
        feats = res101(input)
        output = seg(feats)

        # Clear gradients on both networks before backprop.
        seg.zero_grad()
        res101.zero_grad()
        loss = criterion(output, label)
        loss.backward()
        # Step both optimizers (backbone and head updated separately).
        optimizer_feat.step()
        optimizer_seg.step()

        ## see — TensorBoard visualization of input, ground truth, prediction
        input = make_image_grid(img, mean, std)
        label = make_label_grid(label.data)
        label = Colorize()(label).type(torch.FloatTensor)
        # argmax over the class dimension gives the predicted label map.
        output = make_label_grid(torch.max(output, dim=1)[1].data)
        output = Colorize()(output).type(torch.FloatTensor)
        writer.add_image('image', input, i)
        writer.add_image('label', label, i)
        writer.add_image('pred', output, i)
        # `.data[0]` scalar access is the pre-0.4 idiom (now `.item()`).
        writer.add_scalar('loss', loss.data[0], i)

        print "epoch %d step %d, loss=%.4f" % (t, i, loss.data.cpu()[0])