# Imports inferred from usage in this module; `set_network`, `train`, and the
# local helpers (dload, load_image, models, visual) are assumed to be provided
# elsewhere in the project.
import math
import random

import matplotlib.pyplot as plt
import mxnet as mx
import numpy as np
import seaborn as sns
from mxnet import gluon, nd
from sklearn.metrics import roc_curve, auc

import dload
import load_image
import models
import visual


def main(opt):
    useAE = (opt.useAE == 1)
    if opt.seed != -1:
        random.seed(opt.seed)
    ctx = mx.gpu() if opt.use_gpu else mx.cpu()
    inclasspaths, inclasses = dload.loadPaths(opt.dataset, opt.datapath, opt.expname,
                                              opt.batch_size + 1, opt.classes)
    train_data, val_data = load_image.load_image(inclasspaths, opt.batch_size,
                                                 opt.img_wd, opt.img_ht, opt.noisevar)
    print('Data loading done.')
    if opt.istest:
        testclasspaths = []
        testclasslabels = []
        if opt.istest:
            filename = '_testlist.txt'
        elif opt.isvalidation:
            filename = '_trainlist.txt'
        else:
            filename = '_validationlist.txt'
        filename = '_trainlist.txt'  # NOTE: overrides the selection above; kept from the original
        with open(opt.dataset + "_" + opt.expname + filename, 'r') as f:
            for line in f:
                testclasspaths.append(line.split(' ')[0])
                if int(line.split(' ')[1]) == -1:
                    testclasslabels.append(0)
                else:
                    testclasslabels.append(1)
        test_data = load_image.load_test_images(testclasspaths, testclasslabels,
                                                opt.batch_size, opt.img_wd, opt.img_ht,
                                                ctx, opt.noisevar)
        netG, netD, trainerG, trainerD = set_network(opt.depth, ctx, 0, 0,
                                                     opt.ndf, opt.ngf, opt.append)
        netG.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_G.params', ctx=ctx)
        netD.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_D.params', ctx=ctx)
        lbllist = []
        scorelist1 = []
        scorelist2 = []
        scorelist3 = []
        scorelist4 = []
        test_data.reset()
        count = 0
        for batch in test_data:
            count += 1
            real_in = batch.data[0].as_in_context(ctx)
            real_out = batch.data[1].as_in_context(ctx)
            lbls = batch.label[0].as_in_context(ctx)
            outnn = netG(real_out)
            out_concat = nd.concat(real_out, outnn, dim=1) if opt.append else outnn
            output4 = nd.mean(netD(out_concat), (1, 3, 2)).asnumpy()
            out = netG(real_in)
            out_concat = nd.concat(real_in, out, dim=1) if opt.append else out
            output = netD(out_concat)  # discriminator response to the denoised image
            output3 = nd.mean(out - real_out, (1, 3, 2)).asnumpy()  # denoised - real
            output = nd.mean(output, (1, 3, 2)).asnumpy()
            out_concat = nd.concat(real_out, real_out, dim=1) if opt.append else real_out
            output2 = netD(out_concat)  # discriminator response to the noise-free image
            output2 = nd.mean(output2, (1, 3, 2)).asnumpy()
            lbllist += list(lbls.asnumpy())
            scorelist1 += list(output)
            scorelist2 += list(output2)
            scorelist3 += list(output3)
            scorelist4 += list(output4)
            # Save a few qualitative samples: input | target | denoised | reconstruction
            fake_img1 = nd.concat(real_in[0], real_out[0], out[0], outnn[0], dim=1)
            fake_img2 = nd.concat(real_in[1], real_out[1], out[1], outnn[1], dim=1)
            fake_img3 = nd.concat(real_in[2], real_out[2], out[2], outnn[2], dim=1)
            fake_img4 = nd.concat(real_in[3], real_out[3], out[3], outnn[3], dim=1)
            fake_img = nd.concat(fake_img1, fake_img2, fake_img3, fake_img4, dim=2)
            # print(np.shape(fake_img))
            visual.visualize(fake_img)
            plt.savefig('outputs/T_' + opt.expname + '_' + str(count) + '.png')
        '''
        fpr, tpr, _ = roc_curve(lbllist, scorelist1, pos_label=1)
        roc_auc1 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist2, pos_label=1)
        roc_auc2 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist3, pos_label=1)
        roc_auc3 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist4, pos_label=1)
        roc_auc4 = auc(fpr, tpr)
        return [roc_auc1, roc_auc2, roc_auc3, roc_auc4]
        '''
        return [0, 0, 0, 0]
    else:
        netG, netD, trainerG, trainerD = set_network(opt.depth, ctx, opt.lr, opt.beta1,
                                                     opt.ndf, opt.ngf, opt.append)
        if opt.graphvis:
            print(netG)
        print('training')
        print(opt.epochs)
        loss_vec = train(opt.pool_size, opt.epochs, train_data, val_data, ctx, netG, netD,
                         trainerG, trainerD, opt.lambda1, opt.batch_size, opt.expname,
                         opt.append, useAE=useAE)
        plt.gcf().clear()
        plt.plot(loss_vec[0], label="D", alpha=0.7)
        plt.plot(loss_vec[1], label="G", alpha=0.7)
        plt.plot(loss_vec[2], label="R", alpha=0.7)
        plt.plot(loss_vec[3], label="Acc", alpha=0.7)
        plt.legend()
        plt.savefig('outputs/' + opt.expname + '_loss.png')
        return inclasses
def mainEvaluation(opt):
    ctx = mx.gpu() if opt.use_gpu else mx.cpu()
    testclasspaths = []
    testclasslabels = []
    print('loading test files')
    filename = '_testlist.txt'
    with open(opt.dataset + "_" + opt.expname + filename, 'r') as f:
        for line in f:
            testclasspaths.append(line.split(' ')[0])
            if int(line.split(' ')[1]) == -1:
                testclasslabels.append(0)
            else:
                testclasslabels.append(1)
    neworder = range(len(testclasslabels))  # unused; kept from the original
    c = list(zip(testclasslabels, testclasspaths))
    print('shuffling')
    random.shuffle(c)
    testclasslabels, testclasspaths = zip(*c)
    print('loading pictures')
    test_data = load_image.load_test_images(testclasspaths, testclasslabels, opt.batch_size,
                                            opt.img_wd, opt.img_ht, ctx, opt.noisevar)
    print('picture loading done')
    opt.istest = True
    networks = models.set_network(opt, ctx, True)
    netEn = networks[0]
    netDe = networks[1]
    netD = networks[2]
    netD2 = networks[3]
    load_epoch = opt.epochs - 1
    netEn.load_params('checkpoints/' + opt.expname + '_' + str(load_epoch) + '_En.params', ctx=ctx)
    netDe.load_params('checkpoints/' + opt.expname + '_' + str(load_epoch) + '_De.params', ctx=ctx)
    if opt.ntype > 1:
        netD.load_params('checkpoints/' + opt.expname + '_' + str(load_epoch) + '_D.params', ctx=ctx)
    if opt.ntype > 2:
        netD2.load_params('checkpoints/' + opt.expname + '_' + str(load_epoch) + '_D2.params', ctx=ctx)
    print('Model loading done')
    lbllist = []
    scorelist1 = []
    scorelist2 = []
    scorelist3 = []
    scorelist4 = []
    test_data.reset()
    count = 0
    for batch in test_data:
        count += 1
        output1 = np.zeros(opt.batch_size)
        output2 = np.zeros(opt.batch_size)
        output3 = np.zeros(opt.batch_size)
        output4 = np.zeros(opt.batch_size)
        real_in = batch.data[0].as_in_context(ctx)
        real_out = batch.data[1].as_in_context(ctx)
        lbls = batch.label[0].as_in_context(ctx)
        outnn = netDe(netEn(real_in))
        out = outnn
        output3 = -1 * nd.mean((outnn - real_out) ** 2, (1, 3, 2)).asnumpy()  # negative reconstruction error
        if opt.ntype > 1:  # beyond the plain AE: discriminator scores are available
            out_concat = nd.concat(real_in, outnn, dim=1) if opt.append else outnn
            output1 = nd.mean(netD(out_concat), (1, 3, 2)).asnumpy()
            out_concat = nd.concat(real_in, real_in, dim=1) if opt.append else real_in
            output2 = netD(out_concat)  # image with no noise
            output2 = nd.mean(output2, (1, 3, 2)).asnumpy()
            out = netDe(netEn(real_out))
            out_concat = nd.concat(real_in, out, dim=1) if opt.append else out
            output = netD(out_concat)  # denoised image
            output4 = nd.mean(output, (1, 3, 2)).asnumpy()
        lbllist += list(lbls.asnumpy())
        scorelist1 += list(output1)
        scorelist2 += list(output2)
        scorelist3 += list(output3)
        scorelist4 += list(output4)
        out = netDe(netEn(real_in))
        # Save some sample results
        fake_img1 = nd.concat(real_in[0], real_out[0], out[0], outnn[0], dim=1)
        fake_img2 = nd.concat(real_in[1], real_out[1], out[1], outnn[1], dim=1)
        fake_img3 = nd.concat(real_in[2], real_out[2], out[2], outnn[2], dim=1)
        fake_img4 = nd.concat(real_in[3], real_out[3], out[3], outnn[3], dim=1)
        fake_img = nd.concat(fake_img1, fake_img2, fake_img3, fake_img4, dim=2)
        visual.visualize(fake_img)
        plt.savefig('outputs/T_' + opt.expname + '_' + str(count) + '.png')
    print("Positives: " + str(np.sum(lbllist)))
    print("Negatives: " + str(len(lbllist) - np.sum(lbllist)))  # was np.shape(lbllist) - np.sum(lbllist), which raises a TypeError
    fpr, tpr, _ = roc_curve(lbllist, scorelist3, pos_label=1)
    roc_auc1 = 0
    roc_auc2 = 0
    roc_auc4 = 0
    roc_auc3 = auc(fpr, tpr)
    if int(opt.ntype) > 1:  # discriminator-based scores only exist beyond the plain AE
        fpr, tpr, _ = roc_curve(lbllist, scorelist1, pos_label=1)
        roc_auc1 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist2, pos_label=1)
        roc_auc2 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist4, pos_label=1)
        roc_auc4 = auc(fpr, tpr)
    return [roc_auc1, roc_auc2, roc_auc3, roc_auc4]
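# ---------------------------------------------------------------------------
# Hedged toy example (assumption: synthetic labels and scores, not project
# data) illustrating the scoring convention used above: label 1 marks known
# in-class images, label 0 marks novel ones, and higher scores should mean
# "known", so each score list is passed to roc_curve with pos_label=1.
# ---------------------------------------------------------------------------
def _auc_demo():
    labels = [1, 1, 1, 0, 0, 0]               # 1 = known class, 0 = unknown/novel
    scores = [0.9, 0.8, 0.4, 0.35, 0.2, 0.1]  # e.g. negative reconstruction error
    fpr, tpr, _ = roc_curve(labels, scores, pos_label=1)
    return auc(fpr, tpr)                      # 1.0 for this perfectly separated toy case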
def main(opt):
    ctx = mx.gpu() if opt.use_gpu else mx.cpu()
    testclasspaths = []
    testclasslabels = []
    print('loading test files')
    filename = '_testlist.txt'
    with open(opt.dataset + "_" + opt.expname + filename, 'r') as f:
        for line in f:
            testclasspaths.append(line.split(' ')[0])
            if int(line.split(' ')[1]) == -1:
                testclasslabels.append(0)
            else:
                testclasslabels.append(1)
    neworder = list(range(len(testclasslabels)))
    random.shuffle(neworder)  # was `neworder = shuffle(neworder)`, which is undefined here
    c = list(zip(testclasslabels, testclasspaths))
    print('shuffling')
    random.shuffle(c)
    # testclasslabels, testclasspaths = zip(*c)
    # testclasslabels = testclasslabels[1:5000]
    # testclasspaths = testclasspaths[1:5000]
    ltnt = 512
    print('loading pictures')
    test_data = load_image.load_test_images(testclasspaths, testclasslabels, opt.batch_size,
                                            opt.img_wd, opt.img_ht, ctx, opt.noisevar)
    print('picture loading done')
    netEn, netDe, netD, netD2, netDS = set_network(opt.depth, ctx, 0, 0, opt.ndf, opt.ngf, opt.append)
    netEn.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_En.params', ctx=ctx)
    netDe.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_De.params', ctx=ctx)
    netD.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_D.params', ctx=ctx)
    netD2.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_D2.params', ctx=ctx)
    netDS.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_SD.params', ctx=ctx)
    print('Model loading done')
    lbllist = []
    scorelist1 = []
    scorelist2 = []
    scorelist3 = []
    scorelist4 = []
    test_data.reset()
    count = 0
    for batch in test_data:
        count += 1
        print(str(count))  # , end="\r")
        real_in = batch.data[0].as_in_context(ctx)
        real_out = batch.data[1].as_in_context(ctx)
        lbls = batch.label[0].as_in_context(ctx)
        code = netEn(real_out)
        code = code + nd.random.normal(loc=0, scale=0.002, shape=code.shape, ctx=ctx)
        outnn = netDe(code)
        out_concat = nd.concat(real_out, outnn, dim=1) if opt.append else outnn
        output4 = nd.mean(netD(out_concat), (1, 3, 2)).asnumpy()
        code = netEn(real_in)
        # code = codet + nd.random.normal(loc=0, scale=0.0000001, shape=code.shape, ctx=ctx)
        # code2 = codet + nd.random.normal(loc=0, scale=0.000001, shape=code.shape, ctx=ctx)
        # eq_code = heq(code.asnumpy(), 2)
        # code = nd.array(eq_code, ctx=ctx)
        out = netDe(code)
        # out2 = netDe(code2)
        out_concat = nd.concat(real_in, out, dim=1) if opt.append else out
        output = netD(out_concat)  # denoised image
        output3 = nd.mean((out - real_out) ** 2, (1, 3, 2)).asnumpy()  # denoised - real
        output = nd.mean(output, (1, 3, 2)).asnumpy()
        out_concat = nd.concat(real_out, real_out, dim=1) if opt.append else real_out
        output2 = netDS(netDe(code))  # image with no noise
        output2 = nd.mean(output2, (1, 3, 2)).asnumpy()
        lbllist += list(lbls.asnumpy())
        scorelist1 += list(output)
        scorelist2 += list(output2)
        scorelist3 += list(output3)
        scorelist4 += list(output4)
        fake_img1 = nd.concat(real_in[0], real_out[0], out[0], outnn[0], dim=1)
        fake_img2 = nd.concat(real_in[1], real_out[1], out[1], outnn[1], dim=1)
        fake_img3 = nd.concat(real_in[2], real_out[2], out[2], outnn[2], dim=1)
        fake_img4 = nd.concat(real_in[3], real_out[3], out[3], outnn[3], dim=1)
        fake_img = nd.concat(fake_img1, fake_img2, fake_img3, fake_img4, dim=2)
        # print(np.shape(fake_img))
        visual.visualize(fake_img)
        plt.savefig('outputs/T_' + opt.expname + '_' + str(count) + '.png')
    if not opt.isvalidation:
        fpr, tpr, _ = roc_curve(lbllist, scorelist1, pos_label=1)
        roc_auc1 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist2, pos_label=1)
        roc_auc2 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist3, pos_label=1)
        roc_auc3 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist4, pos_label=1)
        roc_auc4 = auc(fpr, tpr)
        # Plot score distributions for known vs. unknown classes.
        plt.gcf().clear()
        plt.clf()
        sns.set(color_codes=True)
        posscores = [scorelist3[i] for i, v in enumerate(lbllist) if int(v) == 1]
        negscores = [scorelist3[i] for i, v in enumerate(lbllist) if int(v) == 0]
        # sns.distplot(posscores, hist=False, label="Known Classes", rug=True)
        sns.kdeplot(posscores, label="Known Classes")
        sns.kdeplot(negscores, label="Unknown Classes")
        # sns.distplot(negscores, hist=False, label="Unknown Classes", rug=True)
        plt.legend()
        plt.savefig('outputs/matdist_' + opt.expname + '_.png')
        plt.gcf().clear()
        # Decode one-hot latent codes to visualise the decoder's learned "atoms".
        inputT = nd.zeros((ltnt, ltnt, 1, 1), ctx=ctx)
        for i in range(0, ltnt):
            inputT[i, i, :, :] = -1
        out = netDe(inputT)
        count = 0
        for i in range(int(math.ceil(math.sqrt(ltnt)))):
            for j in range(int(math.ceil(math.sqrt(ltnt)))):
                if count < ltnt:
                    plt.subplot(math.ceil(math.sqrt(ltnt)), math.ceil(math.sqrt(ltnt)), count + 1)
                    plt.imshow(((out[count].asnumpy().transpose(1, 2, 0) + 1.0) * 127.5).astype(np.uint8))
                    plt.axis('off')
                    count += 1
        plt.savefig('outputs/atoms_' + opt.expname + '_.png')
        plt.gcf().clear()
        plt.clf()
        return [roc_auc1, roc_auc2, roc_auc3, roc_auc4]
    else:
        return [0, 0, 0, 0]
    # NOTE: the block below is unreachable (both branches above return); kept from the original.
    fakecode = nd.random_normal(loc=0, scale=1, shape=(16, 4096, 1, 1), ctx=ctx)
    out = netDe(fakecode)
    fake_img1 = nd.concat(out[0], out[1], out[2], out[3], dim=1)
    fake_img2 = nd.concat(out[7], out[6], out[5], out[4], dim=1)
    fake_img3 = nd.concat(out[8], out[9], out[10], out[11], dim=1)
    fake_img4 = nd.concat(out[15], out[14], out[13], out[12], dim=1)
    fake_img = nd.concat(fake_img1, fake_img2, fake_img3, fake_img4, dim=2)
    # print(np.shape(fake_img))
    visual.visualize(fake_img)
    plt.savefig('outputs/fakes_' + opt.expname + '_.png')
ctx = mx.gpu() if opt.use_gpu else mx.cpu()
testclasspaths = []
testclasslabels = []
filename = '_trainlist.txt'
with open(opt.dataset + "_" + opt.expname + filename, 'r') as f:
    for line in f:
        testclasspaths.append(line.split(' ')[0])
        if int(line.split(' ')[1]) == -1:
            testclasslabels.append(0)
        else:
            testclasslabels.append(1)
# testclasspaths = testclasspaths[0:1]
# testclasslabels = testclasslabels[0:1]
test_data = load_image.load_test_images(testclasspaths, testclasslabels, opt.batch_size,
                                        opt.img_wd, opt.img_ht, ctx, opt.noisevar)
print(ctx)
follist = range(0, 201, 10)
folders = range(0, 10)
for classname in [0]:  # folders:
    # ctx = mx.gpu() if opt.use_gpu else mx.cpu()
def main(opt):
    ctx = mx.gpu() if opt.use_gpu else mx.cpu()
    testclasspaths = []
    testclasslabels = []
    if opt.istest:
        filename = '_testlist.txt'
    else:
        filename = '_validationlist.txt'
    with open(opt.dataset + "_" + opt.expname + filename, 'r') as f:
        for line in f:
            testclasspaths.append(line.split(' ')[0])
            if int(line.split(' ')[1]) == -1:
                testclasslabels.append(0)
            else:
                testclasslabels.append(1)
    test_data = load_image.load_test_images(testclasspaths, testclasslabels, opt.batch_size,
                                            opt.img_wd, opt.img_ht, ctx, opt.noisevar)
    netEn, netDe, netD, netD2 = set_network(opt.depth, ctx, opt.ngf)
    netEn.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_En.params', ctx=ctx)
    netDe.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_De.params', ctx=ctx)
    netD.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_D.params', ctx=ctx)
    netD2.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_D2.params', ctx=ctx)
    lbllist = []
    scorelist1 = []
    scorelist2 = []
    scorelist3 = []
    scorelist4 = []
    test_data.reset()
    count = 0
    for batch in test_data:
        count += 1
        real_in = batch.data[0].as_in_context(ctx)
        real_out = batch.data[1].as_in_context(ctx)
        lbls = batch.label[0].as_in_context(ctx)
        out = netDe(netEn(real_out))
        output4 = nd.mean(netD2(out), (1, 3, 2)).asnumpy()
        out = netDe(netEn(real_in))
        # real_concat = nd.concat(out, out, dim=1)
        output = netD2(out)  # denoised image
        output3 = nd.mean(out - real_out, (1, 3, 2)).asnumpy()  # denoised - real
        output = nd.mean(output, (1, 3, 2)).asnumpy()
        print(output)
        print(lbls)
        output2 = netD2(real_out)  # image with no noise
        output2 = nd.mean(output2, (1, 3, 2)).asnumpy()
        lbllist += list(lbls.asnumpy())
        scorelist1 += list(output)
        scorelist2 += list(output2)
        scorelist3 += list(output3)
        scorelist4 += list(output4)
    fpr, tpr, _ = roc_curve(lbllist, scorelist1, pos_label=1)
    roc_auc1 = auc(fpr, tpr)
    fpr, tpr, _ = roc_curve(lbllist, scorelist2, pos_label=1)
    roc_auc2 = auc(fpr, tpr)
    fpr, tpr, _ = roc_curve(lbllist, scorelist3, pos_label=1)
    roc_auc3 = auc(fpr, tpr)
    fpr, tpr, _ = roc_curve(lbllist, scorelist4, pos_label=1)
    roc_auc4 = auc(fpr, tpr)
    return [roc_auc1, roc_auc2, roc_auc3, roc_auc4]
def main(opt):
    ctx = mx.gpu() if opt.use_gpu else mx.cpu()
    testclasspaths = []
    testclasslabels = []
    print('loading test files')
    if opt.istest:
        filename = '_testlist.txt'
    elif opt.isvalidation:
        filename = '_trainlist.txt'
    else:
        filename = '_validationlist.txt'
    with open(opt.dataset + "_" + opt.expname + filename, 'r') as f:
        for line in f:
            testclasspaths.append(line.split(' ')[0])
            if int(line.split(' ')[1]) == -1:
                testclasslabels.append(0)
            else:
                testclasslabels.append(1)
    neworder = list(range(len(testclasslabels)))
    random.shuffle(neworder)  # was `neworder = shuffle(neworder)`, which is undefined here
    c = list(zip(testclasslabels, testclasspaths))
    print('shuffling')
    random.shuffle(c)
    testclasslabels, testclasspaths = zip(*c)
    testclasslabels = testclasslabels[1:5000]
    testclasspaths = testclasspaths[1:5000]
    print('loading pictures')
    test_data = load_image.load_test_images(testclasspaths, testclasslabels, opt.batch_size,
                                            opt.img_wd, opt.img_ht, ctx, opt.noisevar)
    print('picture loading done')
    netG, netD, trainerG, trainerD = set_network(opt.depth, ctx, 0, 0, opt.ndf, opt.ngf, opt.append)
    netG.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_G.params', ctx=ctx)
    netD.load_params('checkpoints/' + opt.expname + '_' + str(opt.epochs) + '_D.params', ctx=ctx)
    print('Model loading done')
    lbllist = []
    scorelist1 = []
    scorelist2 = []
    scorelist3 = []
    scorelist4 = []
    test_data.reset()
    count = 0
    for batch in test_data:
        count += 1
        print(str(count))  # , end="\r")
        real_in = batch.data[0].as_in_context(ctx)
        real_out = batch.data[1].as_in_context(ctx)
        lbls = batch.label[0].as_in_context(ctx)
        outnn = netG(real_out)
        out_concat = nd.concat(real_out, outnn, dim=1) if opt.append else outnn
        output4 = nd.mean(netD(out_concat), (1, 3, 2)).asnumpy()
        out = netG(real_in)
        out_concat = nd.concat(real_in, out, dim=1) if opt.append else out
        output = netD(out_concat)  # denoised image
        output3 = nd.mean(out - real_out, (1, 3, 2)).asnumpy()  # denoised - real
        output = nd.mean(output, (1, 3, 2)).asnumpy()
        out_concat = nd.concat(real_out, real_out, dim=1) if opt.append else real_out
        output2 = netD(out_concat)  # image with no noise
        output2 = nd.mean(output2, (1, 3, 2)).asnumpy()
        lbllist += list(lbls.asnumpy())
        scorelist1 += list(output)
        scorelist2 += list(output2)
        scorelist3 += list(output3)
        scorelist4 += list(output4)
        fake_img1 = nd.concat(real_in[0], real_out[0], out[0], outnn[0], dim=1)
        fake_img2 = nd.concat(real_in[1], real_out[1], out[1], outnn[1], dim=1)
        fake_img3 = nd.concat(real_in[2], real_out[2], out[2], outnn[2], dim=1)
        fake_img4 = nd.concat(real_in[3], real_out[3], out[3], outnn[3], dim=1)
        fake_img = nd.concat(fake_img1, fake_img2, fake_img3, fake_img4, dim=2)
        # print(np.shape(fake_img))
        visual.visualize(fake_img)
        plt.savefig('outputs/T_' + opt.expname + '_' + str(count) + '.png')
    if not opt.isvalidation:
        fpr, tpr, _ = roc_curve(lbllist, scorelist1, pos_label=1)
        roc_auc1 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist2, pos_label=1)
        roc_auc2 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist3, pos_label=1)
        roc_auc3 = auc(fpr, tpr)
        fpr, tpr, _ = roc_curve(lbllist, scorelist4, pos_label=1)
        roc_auc4 = auc(fpr, tpr)
        return [roc_auc1, roc_auc2, roc_auc3, roc_auc4]
    else:
        return [0, 0, 0, 0]
with open(dataset + "_" + expname + "_testlist.txt", 'r') as f:
    for line in f:
        testclasspaths.append(line.split(' ')[0])
        if int(line.split(' ')[1]) == -1:
            testclasslabels.append(0)
        else:
            testclasslabels.append(1)
print(np.shape(testclasslabels))
print(batch_size)
print(np.shape(testclasspaths))
print('Loading data')
test_data = load_image.load_test_images(testclasspaths, testclasslabels, batch_size,
                                        img_wd, img_ht, ctx=ctx)
print('Loading Done')
# Loss
GAN_loss = gluon.loss.SigmoidBinaryCrossEntropyLoss()
L1_loss = gluon.loss.L1Loss()
netG, netD, trainerG, trainerD = set_network()
netG.load_params("checkpoints/" + expname + "_" + str(epoch) + "_G.params", ctx=ctx)
netD.load_params("checkpoints/" + expname + "_" + str(epoch) + "_D.params", ctx=ctx)
print('Loading model done')
lbllist = []