def plot_and_save_statistics(solvers, args):
    """Plot some desired statistics."""
    dname = args.dataset if not args.use_gray else args.dataset + '.gray'
    for k, v in solvers.items():
        # save statistics
        stats_arr = su.ntpl2array(v.getitstat())
        np.save(os.path.join(args.output_path, k, f'{dname}.stats.npy'),
                stats_arr)
        # we save time separately
        time_stats = {'Time': v.getitstat().Time}
        with open(os.path.join(args.output_path, k,
                               f'{dname}.time_stats.pkl'), 'wb') as fh:
            pickle.dump(time_stats, fh)
        # save dictionary visualization
        plt.clf()
        d = v.getdict().squeeze()
        if d.ndim == 3:  # grayscale image
            plt.imshow(su.tiledict(d), cmap='gray')
        else:
            plt.imshow(su.tiledict(d))
        plt.savefig(os.path.join(args.output_path, k, f'{dname}.pdf'),
                    bbox_inches='tight')
    # show all learned dictionaries side by side
    plt.clf()
    nsol = len(solvers)
    for i, (k, v) in enumerate(solvers.items()):
        plt.subplot(1, nsol, i + 1)
        d = v.getdict().squeeze()
        if d.ndim == 3:  # grayscale image
            plt.imshow(su.tiledict(d), cmap='gray')
        else:
            plt.imshow(su.tiledict(d))
        plt.title(k)
    plt.show()
def visualize_dicts(solvers):
    """Show visualizations of learned dictionaries."""
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        logger.warning('matplotlib is not available, not visualizing dicts')
        return
    # squeeze=False keeps `axes` 2D even when there is only a single solver
    _, axes = plt.subplots(1, len(solvers), figsize=(7 * len(solvers), 7),
                           squeeze=False)
    for i, (k, v) in enumerate(solvers.items()):
        tiled = su.tiledict(v.getdict().squeeze())
        if tiled.ndim == 2:
            axes[0, i].imshow(tiled, cmap='gray')
        else:
            axes[0, i].imshow(tiled)
        axes[0, i].set_title(k)
        if cfg.SNAPSHOT:
            fig0, ax0 = plt.subplots()
            if tiled.ndim == 2:
                ax0.imshow(tiled, cmap='gray')
            else:
                ax0.imshow(tiled)
            fig0.savefig(os.path.join(cfg.OUTPUT_PATH, k, 'dict.pdf'),
                         bbox_inches='tight')
            plt.close(fig0)
    plt.show()
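# Minimal usage sketch (not from the original module): trains a small SPORCO
# solver on random data and passes it to visualize_dicts above. Assumes SPORCO
# is installed and that the module-level imports/globals used by
# visualize_dicts (su, os, cfg, logger) are configured; the data shapes,
# lambda value and iteration count below are illustrative only.
import numpy as np
from sporco.dictlrn import cbpdndl

S = np.random.randn(64, 64, 4)    # four 64x64 grayscale training images
D0 = np.random.randn(8, 8, 16)    # initial dictionary: sixteen 8x8 filters
opt = cbpdndl.ConvBPDNDictLearn.Options({'Verbose': False, 'MaxMainIter': 5})
solver = cbpdndl.ConvBPDNDictLearn(D0, S, 0.1, opt)
solver.solve()

visualize_dicts({'ConvBPDNDictLearn': solver})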
def solve(self):
    for i in range(total_ite):
        roopcount = roopcount + 1
        X, y, prx, dux, hize = coefficient_learning(
            d, X, y, S, N * N, M, rhox, lamd, d_size, coef_ite)
        # logging
        prx_log.append(prx[coef_ite - 1])
        dux_log.append(dux[coef_ite - 1])
        hizero.append(hize)
        # fft
        xf = X_to_xf(X, N * N, M)
        d, dcon, prd, dud = dictinary_learning(
            d, dcon, xf, S, N * N, M, rhod, d_size, dict_ite)
        # logging
        prd_log.append(prd[0])
        dud_log.append(dud[0])
        # prox of support function
        d = pr_d(d, N * N, M, d_size)
        # X = X.transpose(1, 2, 0)
        # crop
        d = d[:, :d_size, :d_size]
        if i == total_ite - 1:
            d = d.transpose(1, 2, 0)
            plot.imview(util.tiledict(d), fgsz=(7, 7))
            d = d.transpose(2, 0, 1)
        mindx_s = dx_s(d, xf, S, N * N, M, K, d_size) + lamd * l1x(X, N * N, M)
        goal.append(mindx_s)
        D = D_to_d(d, N * N, d_size)
        image = reconstruct(xf[0], D, N * N, M)
        print("iterate number: ", roopcount)
        imgr = sl1 + image
        # imgr = image
        print("Reconstruction PSNR: %.2fdB\n" % sm.psnr(ori[0], imgr))
def load_cifar10_train(train_amount):
    data = datasets.CIFAR10("../data", train=True, download=True)
    imgs = data.data[0:train_amount]
    labels = np.array(data.targets[0:train_amount])
    print("load_cifar10_train: returning arrays with the following shapes")
    print("imgs:", imgs.shape)
    print("labels:", labels.shape)
    plot.imview(util.tiledict(imgs.transpose(1, 2, 3, 0)[:, :, :, :25]))
    return np.float64(imgs) / 255.0, labels
def load_cifar10_test(test_amount):
    data = datasets.CIFAR10("../data", download=True)
    imgs, labels = data.data, np.array(data.targets)
    imgs = imgs[imgs.shape[0] - test_amount:]
    labels = labels[labels.shape[0] - test_amount:]
    print("load_cifar10_test: returning arrays with the following shapes")
    print("imgs:", imgs.shape)
    print("labels:", labels.shape)
    plot.imview(util.tiledict(imgs.transpose(1, 2, 3, 0)[:, :, :, :25]))
    return np.float64(imgs) / 255.0, labels
def save_result(D0, D, X, S, S_reconstructed, filename):
    titles = [[], []]
    r1 = []
    for k in range(S.shape[-1]):
        r1.append(S.T[k].T)
        titles[0].append('')
    r1.append(util.tiledict(D0))
    titles[0].append('')
    r2 = []
    for k in range(S.shape[-1]):
        r2.append(S_reconstructed.T[k].T)
        psnr = sm.psnr(S.T[k].T, S_reconstructed.T[k].T)
        ssim = compare_ssim(S.T[k].T, S_reconstructed.T[k].T)
        l0 = strict_l0norm(np.rollaxis(X, 2)[k])
        titles[1].append("PSNR: %.3fdB\nSSIM: %.4f\nl0 norm: %d" % (psnr, ssim, l0))
    r2.append(util.tiledict(D))
    titles[1].append('')
    saveimg2D(np.array([r1, r2]), filename, np.array(titles))
def _callback(d):
    """Snapshot dictionaries for every iteration."""
    _D = d.getdict().squeeze()
    np.save(os.path.join(path, '{}.{}.npy'.format(dname, d.j)), _D)
    if args.visdom is not None:
        tiled_dict = su.tiledict(_D)
        if not args.use_gray:
            tiled_dict = tiled_dict.transpose(2, 0, 1)
        args.visdom.image(
            tiled_dict,
            opts=dict(caption='ConvBPDNMaskDictLearn.{}'.format(d.j)))
    return 0
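# Hedged wiring sketch (not from the original script): SPORCO dictionary-learning
# solvers derived from sporco.dictlrn.dictlrn.DictLearn accept a per-iteration
# 'Callback' option that is invoked with the solver object, which is how
# _callback above is intended to be hooked in. The option values and the
# D0/sh/lmbda/W variables are assumed to be defined as in the surrounding
# training script.
from sporco.dictlrn import cbpdndl

opt = cbpdndl.ConvBPDNMaskDictLearn.Options(
    {'Verbose': True, 'MaxMainIter': 50, 'Callback': _callback})
d = cbpdndl.ConvBPDNMaskDictLearn(D0, sh, lmbda, W, opt=opt)
d.solve()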
def par_csc(input_, d_size, lmbda, Iter, visualize=False):
    D0 = np.random.uniform(-1.0, 1.0, d_size)
    opt = prlcnscdl.ConvBPDNDictLearn_Consensus.Options({
        'Verbose': True,
        'MaxMainIter': Iter,
        'CBPDN': {'rho': 50.0 * lmbda + 0.5},
        'CCMOD': {'rho': 1.0, 'ZeroMean': True}
    })
    d = prlcnscdl.ConvBPDNDictLearn_Consensus(D0, input_, lmbda, opt, nproc=12)
    D1 = d.solve()
    print("ConvBPDNDictLearn_Consensus solve time: %.2fs" %
          d.timer.elapsed('solve'))
    if visualize:
        plot_2([util.tiledict(D0.squeeze()), util.tiledict(D1.squeeze())],
               ["initial dictionary", "learned dictionary"])
    return d, D1, d.getcoef()
def nn_csc(input_, d_size, lmbda, Iter, visualize=False):
    D0 = np.random.uniform(0, 1.0, d_size)
    opt = cbpdndl.ConvBPDNDictLearn.Options({
        'Verbose': True,
        'MaxMainIter': Iter,
        'CBPDN': {'rho': 50.0 * lmbda + 0.5, 'NonNegCoef': True},
        'CCMOD': {'rho': 10.0, 'ZeroMean': True}
    }, dmethod='cns')
    d = cbpdndl.ConvBPDNDictLearn(D0, input_, lmbda, opt, dmethod='cns')
    D1 = d.solve()
    print("ConvBPDNDictLearn solve time: %.2fs" % d.timer.elapsed('solve'))
    if visualize:
        plot_2([util.tiledict(D0.squeeze()), util.tiledict(D1.squeeze())],
               ["initial dictionary", "learned dictionary"])
    return d, D1, d.getcoef()
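# Minimal usage sketch (not from the original script): calls the helpers above on
# random data. The image and dictionary sizes, the lambda value and the
# iteration count are illustrative assumptions; plot_2 must be defined when
# visualize=True is requested.
import numpy as np

S = np.random.randn(64, 64, 8)   # eight 64x64 grayscale training images
d, D1, X = nn_csc(S, d_size=(8, 8, 32), lmbda=0.1, Iter=20)
# d, D1, X = par_csc(S, d_size=(8, 8, 32), lmbda=0.1, Iter=20)  # parallel variant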
def output_Image(flag, Img, file, path):
    if flag == 0:
        fig1 = plot.figure()
        plot.subplot(1, 1, 1)
        plot.imview(util.tiledict(Img[0]), title='Initial Dictionary (Layer1)',
                    fig=fig1)
        fig1.savefig(path + '\\' + file + '(Layer1).png')
        fig2 = plot.figure()
        plot.subplot(1, 1, 1)
        plot.imview(util.tiledict(Img[1]), title='Initial Dictionary (Layer2)',
                    fig=fig2)
        fig2.savefig(path + '\\' + file + '(Layer2).png')
    elif flag == 1:
        fig3 = plot.figure()
        for i in range(len(Img)):
            plot.subplot(3, 3, i + 1)
            plot.imview(util.tiledict(Img[i]), title='', fig=fig3)
        fig3.savefig(path + '\\' + file + '.png')
    elif flag == 2:
        fig4 = plot.figure()
        for i in range(len(Img)):
            plot.subplot(3, 3, i + 1)
            plot.imview(util.tiledict(Img[i]), title='', fig=fig4)
        fig4.savefig(path + '\\' + file + '.png')
    elif flag == 3:
        fig_t = plot.figure()
        plot.subplot(1, 1, 1)
        plot.imview(util.tiledict(Img), title='', fig=fig_t)
        fig_t.savefig(path + '\\' + file + '.png')
def train_models(solvers, train_loader, args):
    """Train for all solvers."""
    dname = args.dataset if not args.use_gray else args.dataset + '.gray'
    masks = []
    shs = []
    for e, blob in enumerate(train_loader):
        mask = su.rndmask(blob.shape, args.noise_fraction, dtype=blob.dtype)
        blobw = blob * mask
        if not args.dont_pad_boundary:
            pad = [(0, args.patch_size - 1), (0, args.patch_size - 1)] + \
                [(0, 0) for _ in range(blob.ndim - 2)]
            blobw = np.pad(blobw, pad, 'constant')
            mask = np.pad(mask, pad, 'constant')
        # l2-TV denoising
        tvl2opt = tvl2.TVL2Denoise.Options({
            'Verbose': False, 'MaxMainIter': 200, 'gEvalY': False,
            'AutoRho': {'Enabled': True}, 'DFidWeight': mask
        })
        denoiser = tvl2.TVL2Denoise(blobw, args.l2_lambda, tvl2opt,
                                    caxis=None if args.use_gray else 2)
        sl = denoiser.solve()
        sh = mask * (blobw - sl)
        # save masks and high-frequency components
        masks.append(mask)
        shs.append(sh)
        # update solvers
        for k, solver in solvers.items():
            solver.solve(sh, W=mask)
            np.save(
                os.path.join(args.output_path, k, '{}.{}.npy'.format(dname, e)),
                solver.getdict().squeeze())
            if args.visdom is not None:
                tiled_dict = su.tiledict(solver.getdict().squeeze())
                if not args.use_gray:
                    tiled_dict = tiled_dict.transpose(2, 0, 1)
                args.visdom.image(tiled_dict, opts=dict(caption=f'{k}.{e}'))
    # snapshot blobs and masks
    masks = np.concatenate(masks, axis=-1)
    shs = np.concatenate(shs, axis=-1)
    np.save(os.path.join(args.output_path, 'train_masks.npy'), masks)
    np.save(os.path.join(args.output_path, 'train_blobs.npy'), shs)
    return solvers, shs, masks
def train_models(D0, solvers, train_blob, args):
    """Train every solver."""
    epochs = args.epochs if args.epochs > 0 else None
    batch_size = args.batch_size if args.batch_size > 0 else None
    loader = BlobLoader(train_blob, epochs, batch_size)
    sample = loader.random_sample()
    solvers = {
        k: sol_name(D0, sample, args.lmbda, opt=opt)
        for k, (sol_name, opt) in solvers.items()
    }
    dname = args.dataset if not args.use_gray else args.dataset + '.gray'
    for e, blob in enumerate(loader):
        if args.pad_boundary:
            assert args.patch_size % 2 == 1, 'Patch size should be odd'
            radius = args.patch_size // 2
            pad = [(radius, radius), (radius, radius)] + \
                [(0, 0) for _ in range(blob.ndim - 2)]
            blob = np.pad(blob, pad, mode='constant')
        if not args.no_tikhonov_filter:
            # fix lambda to be 5
            _, blob = su.tikhonov_filter(blob, 5.)
        for k, s in solvers.items():
            s.solve(blob.copy())
            path = os.path.join(args.output_path, k)
            np.save(os.path.join(path, '{}.{}.npy'.format(dname, e)),
                    s.getdict().squeeze())
            if args.visdom is not None:
                tiled_dict = su.tiledict(s.getdict().squeeze())
                if not args.use_gray:
                    tiled_dict = tiled_dict.transpose(2, 0, 1)
                args.visdom.image(tiled_dict, opts=dict(caption=f'{k}.{e}'))
    # snapshot iteration record
    sio.savemat(os.path.join(args.output_path, 'iter_record.mat'),
                {'iter_record': loader.record})
    return solvers
    }
})
d2 = cbpdndl.ConvBPDNMaskDcplDictLearn(D0, sh, lmbda, W, opt2)
D2 = d2.solve()


"""
Reconstruct from the CDL solution with a spatial mask.
"""

sr2 = d2.reconstruct().squeeze() + sl


"""
Compare dictionaries.
"""

fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D1.squeeze()), title='Without Mask Decoupling',
            fig=fig)
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D2.squeeze()), title='With Mask Decoupling', fig=fig)
fig.show()


"""
Display reference and training images.
"""

fig = plot.figure(figsize=(14, 14))
plot.subplot(2, 2, 1)
plot.imview(S[..., 0], title='Reference', fig=fig)
plot.subplot(2, 2, 2)
plot.imview(Sw[..., 0], title='Test', fig=fig)
plot.subplot(2, 2, 3)
            'Enabled': True
        },
        'ZeroMean': True
    }
})

# Run optimisation
d = cbpdndl.ConvBPDNDictLearn(D0, vh, lmbda, opt, dimK=0, dimN=3)
D1 = d.solve()
print("ConvBPDNDictLearn solve time: %.2fs" % d.timer.elapsed('solve'))

# Display central temporal slice (index 1) of the dictionaries
D1 = D1.squeeze()
fig1 = plot.figure(1, figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0[..., 1, :]), fgrf=fig1, title='D0')
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1[..., 1, :]), fgrf=fig1, title='D1')
fig1.show()

# Plot functional value and residuals
its = d.getitstat()
fig2 = plot.figure(2, figsize=(21, 7))
plot.subplot(1, 3, 1)
plot.plot(its.ObjFun, fgrf=fig2, xlbl='Iterations', ylbl='Functional')
plot.subplot(1, 3, 2)
plot.plot(np.vstack((its.XPrRsdl, its.XDlRsdl, its.DPrRsdl, its.DDlRsdl)).T,
          fgrf=fig2, ptyp='semilogy', xlbl='Iterations', ylbl='Residual',
Highpass filter example image.
"""

npd = 16
fltlmbd = 10
sl, sh = signal.tikhonov_filter(img, fltlmbd, npd)


"""
Load dictionary and display it.
"""

D = util.convdicts()['G:12x12x36']
# Repeat the dictionary twice, adding independent noise to each copy
D = np.append(D + 0.01 * np.random.randn(*D.shape),
              D + 0.01 * np.random.randn(*D.shape), axis=-1)
plot.imview(util.tiledict(D), fgsz=(10, 10))


"""
Set :class:`.admm.cbpdnin.ConvBPDNInhib` solver options.
"""

lmbda = 5e-2
mu = 5e-2
opt = cbpdnin.ConvBPDNInhib.Options({'Verbose': True, 'MaxMainIter': 200,
                                     'RelStopTol': 5e-3, 'AuxVarObj': False})


"""
Initialise and run CSC solver.
"""
                  scaled=True, gray=True, idxexp=np.s_[160:416, 60:316])


"""
Highpass filter example image.
"""

npd = 16
fltlmbd = 10
sl, sh = util.tikhonov_filter(img, fltlmbd, npd)


"""
Load dictionary and display it.
"""

D = util.convdicts()['G:12x12x36']
plot.imview(util.tiledict(D), fgsz=(7, 7))


"""
Set :class:`.admm.cbpdn.ConvMinL1InL2Ball` solver options.
"""

epsilon = 3.4e0
opt = cbpdn.ConvMinL1InL2Ball.Options({'Verbose': True, 'MaxMainIter': 200,
                                       'HighMemSolve': True,
                                       'LinSolveCheck': True,
                                       'RelStopTol': 5e-3, 'AuxVarObj': False,
                                       'rho': 50.0,
                                       'AutoRho': {'Enabled': False
# Run convolutional dictionary learning
lmbda0 = 0.5
opt0 = cbpdndl.ConvBPDNDictLearn.Options(
    {'Verbose': True, 'MaxMainIter': 400,
     'CBPDN': {'rho': 50.0 * lmbda0 + 0.5, 'NonNegCoef': True},
     'CCMOD': {'rho': 10.0, 'ZeroMean': True}}, dmethod='cns')
d0 = cbpdndl.ConvBPDNDictLearn(D00, train_data, lmbda0, opt0, dmethod='cns')
D01 = d0.solve()
print("ConvBPDNDictLearn solve time: %.2fs" % d0.timer.elapsed('solve'))

# Compare the learned dictionary with its initial value
fig = plot.figure(figsize=(20, 10))
ax1 = fig.add_subplot(121)
ax2 = fig.add_subplot(122)
plot.imview(util.tiledict(D00), fig=fig, ax=ax1, title="initial dictionary")
plot.imview(util.tiledict(D01.squeeze()), fig=fig, ax=ax2,
            title="learned dictionary")

# coef ---> sparse maps (x, y, number of images, channel)
coef0 = np.array(d0.getcoef().squeeze())

#%%
print(np.sum(coef0 < 0))

#%% Visualize the input, the filters and the sparse maps
fig = plot.figure(figsize=(18, 6))
ax1 = fig.add_subplot(131)
ax2 = fig.add_subplot(132)
ax3 = fig.add_subplot(133)
plot.imview(train_data[:, :, 5], fig=fig, ax=ax1, title="input digit")
plot.imview(util.tiledict(D01.squeeze()), fig=fig, ax=ax2, title="filters")
plot.imview(util.tiledict(coef0[:, :, 5, :]), fig=fig, ax=ax3,
            title="sparse map (response)")
xstep = cbpdn.ConvBPDN(D0n, sh, lmbda, optx)

# Create D update object
dstep = ccmod.ConvCnstrMOD(None, sh, D0.shape, optd)

# Create DictLearn object
opt = dictlrn.DictLearn.Options({'Verbose': True, 'MaxMainIter': 100})
d = dictlrn.DictLearn(xstep, dstep, opt)
D1 = d.solve()
print("DictLearn solve time: %.2fs" % d.runtime, "\n")

# Display dictionaries
D1 = D1.squeeze()
fig1 = plot.figure(1, figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0), fgrf=fig1, title='D0')
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1), fgrf=fig1, title='D1')
fig1.show()

# Plot functional value and residuals
itsx = xstep.getitstat()
itsd = dstep.getitstat()
fig2 = plot.figure(2, figsize=(21, 7))
plot.subplot(1, 3, 1)
plot.plot(itsx.ObjFun, fgrf=fig2, xlbl='Iterations', ylbl='Functional')
plot.subplot(1, 3, 2)
plot.plot(np.vstack((itsx.PrimalRsdl, itsx.DualRsdl,
                     itsd.PrimalRsdl, itsd.DualRsdl)).T,
          fgrf=fig2, ptyp='semilogy',
""" d = parcnsdl.ConvBPDNDictLearn_Consensus(D0, sh, lmbda, opt) D1 = d.solve() print("ConvBPDNDictLearn_Consensus solve time: %.2fs" % d.timer.elapsed('solve')) """ Display initial and final dictionaries. """ D1 = D1.squeeze() fig = plot.figure(figsize=(14, 7)) plot.subplot(1, 2, 1) plot.imview(util.tiledict(D0), fig=fig, title='D0') plot.subplot(1, 2, 2) plot.imview(util.tiledict(D1), fig=fig, title='D1') fig.show() """ Get iterations statistics from solver object and plot functional value """ its = d.getitstat() plot.plot(its.ObjFun, xlbl='Iterations', ylbl='Functional') # Wait for enter on keyboard input()
""" Reconstruct from the CDL solution with a spatial mask. """ sr2 = d2.reconstruct().squeeze() + sl """ Compare dictionaries. """ fig = plot.figure(figsize=(14, 7)) plot.subplot(1, 2, 1) plot.imview(util.tiledict(D1.squeeze()), title='Without Mask Decoupling', fig=fig) plot.subplot(1, 2, 2) plot.imview(util.tiledict(D2.squeeze()), title='With Mask Decoupling', fig=fig) fig.show() """ Display reference and training images. """ fig = plot.figure(figsize=(14, 14)) plot.subplot(2, 2, 1) plot.imview(S[...,0], title='Reference', fig=fig) plot.subplot(2, 2, 2)
""" npd = 16 fltlmbd = 10 sl, sh = signal.tikhonov_filter(img, fltlmbd, npd) """ Load dictionary and display it. """ D = util.convdicts()['G:12x12x36'] # Repeat the dictionary three times, adding noise to each repetition D = np.concatenate( (D + 0.01 * np.random.randn(*D.shape), D + 0.01 * np.random.randn(*D.shape), D + 0.01 * np.random.randn(*D.shape)), axis=-1) plot.imview(util.tiledict(D), fgsz=(9, 8)) """ Set :class:`.admm.cbpdnin.ConvBPDNInhib` solver options. """ lmbda = 5e-2 mu = 5e-3 # if 'RegLat' diverges, lower mu opt = cbpdnin.ConvBPDNInhib.Options({ 'Verbose': True, 'MaxMainIter': 200, 'RelStopTol': 5e-3, 'AuxVarObj': False }) """ Initialise and run CSC solver. """
Create solver object and solve.
"""

d = cbpdndl.ConvBPDNDictLearn(D0, vh, lmbda, opt, dimK=0, dimN=3)
D1 = d.solve()
print("ConvBPDNDictLearn solve time: %.2fs" % d.timer.elapsed('solve'))


"""
Display initial and final dictionaries: central temporal slice
"""

D1 = D1.squeeze()
fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0[..., 2, :]), fig=fig, title='D0')
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1[..., 2, :]), fig=fig, title='D1')
fig.show()


"""
Display initial and final dictionaries: central spatial vertical slice
"""

fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0[2]), fig=fig, title='D0')
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1[2]), fig=fig, title='D1')
    'L': 50, 'StepSizePolicy': StepSizePolicyBB()})
c2 = cmod.CnstrMOD(X, S, None, opt)
D12 = c2.solve()
print("CMOD solve time: %.2fs" % c2.timer.elapsed('solve'))


"""
Display initial and final dictionaries.
"""

D0 = D0.reshape((8, 8, D0.shape[-1]))
D11 = D11.reshape((8, 8, D11.shape[-1]))
D12 = D12.reshape((8, 8, D12.shape[-1]))
fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 3, 1)
plot.imview(util.tiledict(D0), title='D0', fig=fig)
plot.subplot(1, 3, 2)
plot.imview(util.tiledict(D11), title='D1 Cauchy', fig=fig)
plot.subplot(1, 3, 3)
plot.imview(util.tiledict(D12), title='D1 BB', fig=fig)
fig.show()


"""
Get iterations statistics from the CMOD solver objects and plot functional
value, residuals, and automatically adjusted L against the iteration number.
"""

its1 = c1.getitstat()
its2 = c2.getitstat()
fig = plot.figure(figsize=(20, 5))
plot.subplot(1, 3, 1)
plot.plot(its1.DFid, xlbl='Iterations', ylbl='Functional', fig=fig)
plot.plot(its2.DFid,
def test_05(self):
    D = np.random.randn(8, 8, 64)
    im = util.tiledict(D, sz=((6, 6, 32), (8, 8, 32)))
    'CBPDN': {'rho': 50.0 * lmbda + 0.5},
    'CCMOD': {'rho': 10.0, 'ZeroMean': True}}, dmethod='cns')
d = cbpdndl.ConvBPDNDictLearn(D0, sh, lmbda, opt, dmethod='cns')
D1 = d.solve()
print("ConvBPDNDictLearn solve time: %.2fs" % d.timer.elapsed('solve'))

D1 = D1.squeeze()
fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0), title='D0', fig=fig)
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1), title='D1', fig=fig)
fig.show()

its = d.getitstat()
fig = plot.figure(figsize=(20, 5))
plot.subplot(1, 3, 1)
plot.plot(its.ObjFun, xlbl='Iterations', ylbl='Functional', fig=fig)
plot.subplot(1, 3, 2)
plot.plot(np.vstack((its.XPrRsdl, its.XDlRsdl, its.DPrRsdl, its.DDlRsdl)).T,
          ptyp='semilogy', xlbl='Iterations', ylbl='Residual',
          lgnd=['X Primal', 'X Dual', 'D Primal', 'D Dual'], fig=fig)
plot.subplot(1, 3, 3)
def test_03(self):
    D = np.random.randn(64, 64)
    im = util.tiledict(D, sz=(8, 8))
def solve(self, S):
    self.cri = cr.CSC_ConvRepIndexing(self.getdict(), S,
                                      dimK=self.cri.dimK, dimN=self.cri.dimN)

    self.timer.start(['solve', 'solve_wo_eval'])

    # Initialize with CBPDN
    self.timer.start('xstep')
    copt = copy.deepcopy(self.opt['CBPDN'])
    if self.opt['OCDL', 'CUCBPDN']:
        X = np.stack([
            cucbpdn.cbpdn(self.getdict(), S[..., i].squeeze(), self.lmbda,
                          opt=copt) for i in range(S.shape[-1])
        ], axis=-2)
        X = np.asarray(X.reshape(self.cri.shpX), dtype=self.dtype)
    elif self.opt['OCDL', 'PARCBPDN']:
        popt = parcbpdn.ParConvBPDN.Options(dict(self.opt['CBPDN']))
        xstep = parcbpdn.ParConvBPDN(self.getdict(), S, self.lmbda, opt=popt,
                                     nproc=self.opt['OCDL', 'nproc'])
        X = xstep.solve()
        X = np.asarray(X.reshape(self.cri.shpX), dtype=self.dtype)
    else:
        xstep = cbpdn.ConvBPDN(self.getdict(), S, self.lmbda, opt=copt)
        xstep.solve()
        X = np.asarray(xstep.getcoef().reshape(self.cri.shpX),
                       dtype=self.dtype)
    self.timer.stop('xstep')

    S = np.asarray(S.reshape(self.cri.shpS), dtype=self.dtype)

    # Update At and Bt: (H, W, 1, K, M) -> (H, W, Hc, Wc, 1, K, M)
    self.timer.start('hessian')
    Xe = self.extend_code(X)
    self.update_At(Xe)
    self.update_Bt(Xe, S)
    self.timer.stop('hessian')
    self.Lmbda = self.dtype.type(self.alpha * self.Lmbda + 1)

    # Update dictionary with FISTA
    fopt = copy.deepcopy(self.opt['CCMOD'])
    fopt['X0'] = self.D
    if self.opt['OCDL', 'DiminishingTol']:
        if self.opt['OCDL', 'MinTol'] is None:
            min_tol = 0.
        else:
            min_tol = self.opt['OCDL', 'MinTol']
        fopt['RelStopTol'] = max(
            self.dtype.type(self.opt['CCMOD', 'RelStopTol'] / (1. + self.j)),
            min_tol)
    self.timer.start('dstep')
    dstep = SpatialFISTA(self.At, self.Bt, opt=fopt)
    dstep.solve()
    self.timer.stop('dstep')

    # Set dictionary
    self.setdict(dstep.getmin())

    self.timer.stop('solve_wo_eval')
    evl = self.evaluate(S, X)
    self.timer.start('solve_wo_eval')

    t = self.timer.elapsed(self.opt['IterTimer'])
    if self.opt['OCDL', 'CUCBPDN']:
        # this requires a slight modification of dictlrn
        itst = self.isc.iterstats(self.j, t, None, dstep.itstat[-1], evl)
    else:
        itst = self.isc.iterstats(self.j, t, xstep.itstat[-1],
                                  dstep.itstat[-1], evl)
    self.itstat.append(itst)

    if self.opt['Verbose']:
        self.isc.printiterstats(itst)

    self.j += 1

    self.timer.stop(['solve', 'solve_wo_eval'])

    if 0:  # debug visualization of the current dictionary
        import matplotlib.pyplot as plt
        plt.imshow(su.tiledict(self.getdict().squeeze()))
        plt.show()

    return self.getdict()
})


"""
Create solver object and solve.
"""

d = cbpdndl.ConvBPDNDictLearn(D0, vh, lmbda, opt, dimK=0, dimN=3)
D1 = d.solve()
print("ConvBPDNDictLearn solve time: %.2fs" % d.timer.elapsed('solve'))


"""
Display initial and final dictionaries: central temporal slice
"""

D1 = D1.squeeze()
fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0[..., 2, :]), fgrf=fig, title='D0')
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1[..., 2, :]), fgrf=fig, title='D1')
fig.show()


"""
Display initial and final dictionaries: central spatial vertical slice
"""

fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0[2]), fgrf=fig, title='D0')
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1[2]), fgrf=fig, title='D1')
fig.show()


"""
""" Highpass filter example image. """ npd = 16 fltlmbd = 10 sl, sh = util.tikhonov_filter(img, fltlmbd, npd) """ Load dictionary and display it. """ D = util.convdicts()['G:12x12x36'] plot.imview(util.tiledict(D), fgsz=(7, 7)) """ Set :class:`.admm.cbpdn.ConvBPDNProjL1` solver options. """ gamma = 4.05e2 opt = cbpdn.ConvBPDNProjL1.Options({'Verbose': True, 'MaxMainIter': 250, 'HighMemSolve': True, 'LinSolveCheck': False, 'RelStopTol': 5e-3, 'AuxVarObj': True, 'rho': 3e0, 'AutoRho': {'Enabled': True}}) """ Initialise and run CSC solver.
def test_01(self):
    D = np.random.randn(64, 64)
    im = util.tiledict(D, sz=(8, 8))
    },
    'CCMOD': {'ZeroMean': True}
})
Wr = np.reshape(W, W.shape[0:3] + (W.shape[3], 1))
d2 = cbpdndl.ConvBPDNMaskDcplDictLearn(D0, shpw, lmbda, Wr, opt2)
D2 = d2.solve()

# Reconstruct from ConvBPDNMaskDcplDictLearn solution
sr2 = d2.reconstruct()[:-Npr, :-Npc].squeeze() + sl

# Compare dictionaries
fig1 = plot.figure(1, figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D1.squeeze()), fgrf=fig1,
            title='Without Mask Decoupling')
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D2.squeeze()), fgrf=fig1,
            title='With Mask Decoupling')
fig1.show()

# Display reference and test images (with unmasked lowpass component)
fig2 = plot.figure(2, figsize=(14, 14))
plot.subplot(2, 2, 1)
plot.imview(S[..., 0], fgrf=fig2, title='Reference')
plot.subplot(2, 2, 2)
plot.imview(shpw[:-Npr, :-Npc, :, 0] + sl[..., 0], fgrf=fig2, title='Test')
plot.subplot(2, 2, 3)
def solve(self, S):
    """Solve for given signal S."""
    self.timer.start(['solve', 'solve_wo_eval'])

    cri = cr.CDU_ConvRepIndexing(self.dsz, S)
    S = np.asarray(S.reshape(cri.shpS), dtype=self.dtype)
    X = self.xinit(S)
    Y = X.copy()
    G = self.D.copy()
    D = self.D.copy()
    S = S.squeeze(-1).transpose(3, 2, 0, 1)
    tx = td = 1.

    # MaxMainIter gives the number of iterations for each sample.
    for self.j in range(self.j, self.j + self.opt['MaxMainIter']):
        Xprev, Dprev = X.copy(), D.copy()
        Y, X, G, D, tx, td = self.step(S, Y, X, G, D, tx, td,
                                       self.opt['FISTA', 'L'],
                                       self.opt['CCMOD', 'L'])
        tx = (1 + np.sqrt(1 + 4 * tx**2)) / 2.
        td = (1 + np.sqrt(1 + 4 * td**2)) / 2.

        self.timer.stop('solve_wo_eval')
        X_Rsdl = linalg.norm(Y - Xprev)
        D_Rsdl = linalg.norm(G - Dprev)
        recon = self.slices2im(np.matmul(G, Y))
        dfd = linalg.norm(recon - S)**2 / 2.
        reg = linalg.norm(Y.ravel(), 1)
        obj = dfd + self.lmbda * reg
        cnstr = linalg.norm(self.dprox(G) - G)
        dtx = {'L': self.opt['FISTA', 'L'], 'Rsdl': X_Rsdl,
               'F_Btrack': None, 'Q_Btrack': None, 'IterBTrack': None}
        dtd = {'L': self.opt['CCMOD', 'L'], 'Rsdl': D_Rsdl, 'Cnstr': cnstr,
               'F_Btrack': None, 'IterBTrack': None, 'Q_Btrack': None}
        evl = {'ObjFun': obj, 'DFid': dfd, 'RegL1': reg}
        if not self.opt['AccurateDFid']:
            dtx.update(evl)
            evl = None
        self.timer.start('solve_wo_eval')

        self.D = G
        self.X = Y

        t = self.timer.elapsed(self.opt['IterTimer'])
        itst = self.isc.iterstats(self.j, t, dtx, dtd, evl)
        if self.opt['Verbose']:
            self.isc.printiterstats(itst)

        if self.opt['Callback'] is not None:
            if self.opt['Callback'](self):
                break

        if 0:  # debug visualization of the current dictionary
            import matplotlib.pyplot as plt
            plt.imshow(su.tiledict(self.getdict().squeeze()))
            plt.show()

    self.j += 1

    self.timer.stop(['solve', 'solve_wo_eval'])

    if self.opt['Verbose'] and self.opt['StatusHeader']:
        self.isc.printseparator()

    return self.getdict()
img_index = np.random.randint(0, sh.shape[-1])
d.solve(sh[..., [img_index]])
d.display_end()
D1 = d.getdict()
print("OnlineConvBPDNDictLearn solve time: %.2fs" % d.timer.elapsed('solve'))


"""
Display initial and final dictionaries.
"""

D1 = D1.squeeze()
fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0), title='D0', fig=fig)
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1), title='D1', fig=fig)
fig.show()


"""
Get iteration statistics from the solver object and plot the dictionary change
(DeltaD) and step size (Eta) against the iteration number.
"""

its = d.getitstat()
fig = plot.figure(figsize=(7, 7))
plot.plot(np.vstack((its.DeltaD, its.Eta)).T, xlbl='Iterations',
          lgnd=('Delta D', 'Eta'), fig=fig)
fig.show()
def test_03(self):
    D = np.random.randn(8, 8, 3, 64)
    im = util.tiledict(D)
def test_06(self):
    D = np.random.randn(8, 8, 3, 64)
    im = util.tiledict(D)
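# Quick interactive check (not part of the test suite above): util.tiledict
# collapses a stack of filters into a single tiled image array suitable for
# plot.imview or matplotlib's imshow. The filter shape below is illustrative.
import numpy as np
from sporco import util, plot

D = np.random.randn(8, 8, 3, 64)   # 64 colour filters of size 8x8
im = util.tiledict(D)              # one tiled RGB image
print(im.shape)
plot.imview(im, fgsz=(7, 7))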