Code example #1
File: fft.py Project: kttien/CS279
import h5py
import numpy as np
import matplotlib.pyplot as plt
import sigpy.mri as mr
import sigpy.plot as pl


def nyu_dataset(filename):
    hf = h5py.File(filename, 'r')
    print('Keys:', list(hf.keys()))
    print('Attrs:', dict(hf.attrs))
    volume_kspace = hf['kspace']
    print(volume_kspace[20].shape)  # using slice number 20 of this MRI volume
    slice_kspace = volume_kspace
    show_slices(slice_kspace, [20], cmap='gray')
    plt.imshow(np.log(np.abs(slice_kspace[20])))
    plt.show()

    # Data wrangling: add a singleton coil dimension, as required by the methods below
    test = volume_kspace[20]
    test2 = test[np.newaxis, :, :]

    # Generate sensitivity maps using the ESPIRiT method
    mps = mr.app.EspiritCalib(test2).run()
    pl.ImagePlot(mps, title='Sensitivity Maps Estimated by ESPIRiT')

    # Run the SENSE reconstruction algorithm on the partial k-space data
    lamda = 0.01
    img_sense = mr.app.SenseRecon(test2, mps, lamda=lamda).run()
    pl.ImagePlot(img_sense, title='SENSE Reconstruction')

    # Run L1 wavelet regularized reconstruction on the partial k-space; images inconclusive
    lamda = 0.005
    img_l1wav = mr.app.L1WaveletRecon(test2, mps, lamda).run()
    pl.ImagePlot(img_l1wav, title='L1 Wavelet Regularized Reconstruction')
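
The function above calls show_slices, which is not defined in this snippet; a minimal sketch of such a helper, assuming it simply tiles the log-magnitude of the requested slices, could look like this:

import numpy as np
import matplotlib.pyplot as plt


def show_slices(data, slice_nums, cmap=None):
    # Show the selected slices side by side on a log-magnitude scale.
    plt.figure()
    for i, num in enumerate(slice_nums):
        plt.subplot(1, len(slice_nums), i + 1)
        plt.imshow(np.log(np.abs(data[num]) + 1e-9), cmap=cmap)
    plt.show()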
Code example #2
import sigpy as sp
import sigpy.mri as mr
import sigpy.mri.rf as rf
import numpy as np
import sigpy.plot as pl
import matplotlib

dim = 32
Nc = 8
img_shape = [dim, dim]
sens_shape = [Nc, dim, dim]
sens = mr.birdcage_maps(sens_shape)
pl.ImagePlot(sens)

fov = 0.55  # FOV in m
N = dim  # matrix size
gts = 6.4e-6  # hardware dwell time, s
gslew = 150  # gradient slew rate in mT/m/ms
gamp = 30  # maximum gradient amplitude in mT/m
densamp = 10000  # duration of full density sampling (in samples)
dentrans = 10000  # duration of transition from low-high density (in samples)
R = 1 / 2  # degree of undersampling of the outer region of the trajectory; here we oversample by a factor of 2
dx = 0.025  # in m
rewinder = False
# construct a trajectory
g, k, t, s = rf.spiral_arch(fov / R, dx, gts, gslew, gamp)

# Note that this trajectory is a spiral-out trajectory.
# We will simply time-reverse it to create a spiral-in trajectory.
k = np.flipud(k)
g = np.flipud(g)
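
The birdcage maps and spiral trajectory above are not used further in this excerpt; below is a rough sketch of one way to exercise them, assuming a Shepp-Logan phantom and assuming k must be rescaled so that its extent matches the matrix size expected by sigpy's NUFFT (phantom, coord_spiral, dcf and ksp_sim are names introduced here for illustration):

# Simulate multi-coil spiral k-space from a phantom and grid it back.
phantom = sp.shepp_logan(img_shape).astype(np.complex64)
coord_spiral = k / np.abs(k).max() * (dim / 2)    # rescale trajectory to roughly [-dim/2, dim/2]
ksp_sim = sp.nufft(sens * phantom, coord_spiral)  # shape: (Nc, number of samples)
dcf = np.sum(coord_spiral**2, axis=-1)**0.5       # crude |k| (ramp) density compensation
img_grid = sp.nufft_adjoint(ksp_sim * dcf, coord_spiral, oshape=sens_shape)
pl.ImagePlot(img_grid, z=0, title='Gridded spiral data (sketch)')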
Code example #3
import numpy as np
import sigpy.plot as pl

name="../data/cg_img.npy"
name="./cg_img.npy"
img=np.load(name)

print(img.shape)

for t in range(len(img)):
    s = img[t, ..., ::-1]
    pl.ImagePlot(img[t, ..., ::-1], interpolation='lanczos')
Code example #4
import numpy as np
import matplotlib.pyplot as plt
import sigpy.mri as mr
import sigpy.plot as pl

# ksp, coord, device, lamda and max_iter are assumed to be defined earlier.
print(f'K-space dtype: {ksp.dtype}')
print(f'K-space (min, max): ({np.abs(ksp).min()}, {np.abs(ksp).max()})')
print(f'Coord shape: {coord.shape}')  # (na, ns, 2)
print(f'Coord dtype: {coord.dtype}')
print(f'Coord (min, max): ({coord.min()}, {coord.max()})')

plt.ion()
f, ax = plt.subplots(1, 1)
ax.scatter(coord[:15, :, -1], coord[:15, :, -2])  # first 15 readouts of the trajectory

# Use JSENSE to estimate sensitivity maps
mps = mr.app.JsenseRecon(ksp, coord=coord, device=device).run()

print(f'Shape of coil sensitivity maps: {mps.shape}')

pl.ImagePlot(mps)

# Primal dual hybrid gradient reconstruction
pdhg_app = mr.app.TotalVariationRecon(ksp,
                                      mps,
                                      lamda=lamda,
                                      coord=coord,
                                      max_iter=max_iter,
                                      device=device,
                                      save_objective_values=True)
print(f'Name of solver: {pdhg_app.alg_name}')
pdhg_img = pdhg_app.run()

print(f'Image shape: {pdhg_img.shape}')
print(f'Image dtype: {pdhg_img.dtype}')
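
Because save_objective_values=True, the app records the objective value at every iteration; a short follow-up sketch for inspecting convergence:

plt.figure()
plt.semilogy(pdhg_app.objective_values)  # objective value per PDHG iteration
plt.xlabel('Iteration')
plt.ylabel('Objective value')
plt.title('PDHG convergence')
plt.show()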
Code example #5
import numpy as np
import time

import sigpy      as sp
import sigpy.mri  as mr
import sigpy.plot as pl

from wshfl import WaveShuffling

# -------------------------------------------------------------------------------------------------------------------------- #

rdr = np.load('data/rdr.npy')
tbl = np.load('data/tbl.npy')
mps = np.load('data/mps.npy').T
psf = np.load('data/psf.npy').T
phi = np.load('data/phi.npy').T
mit = 300
sparse_repr = 'W'

# -------------------------------------------------------------------------------------------------------------------------- #

start = time.time()
Waffle = WaveShuffling(rdr, tbl, mps, psf, phi, spr=sparse_repr, lmb=1e-6, mit=mit, dev=0)
Waffle.run()
end = time.time()
print("Device used: " + str(Waffle.device) + ". Reconstruction took " + str(end - start) + " seconds.")

# -------------------------------------------------------------------------------------------------------------------------- #

pl.ImagePlot(Waffle.S.H(Waffle.res).squeeze(), x=1, y=2, z=0, hide_axes=False)

# -------------------------------------------------------------------------------------------------------------------------- #
Code example #6
import sigpy as sp
import sigpy.mri as mr
import sigpy.plot as pl

# xp is the array module in use (numpy or cupy); coord_file, ksp and device are
# assumed to be defined earlier in the notebook.
coord = xp.load(coord_file)


def show_data_info(data, name):
    print("{}: shape={}, dtype={}".format(name, data.shape, data.dtype))


# |k| (ramp) density compensation for the non-Cartesian trajectory
dcf = (coord[..., 0]**2 + coord[..., 1]**2)**0.5
pl.ScatterPlot(coord, dcf, title='Density compensation')

show_data_info(ksp, "ksp")
show_data_info(coord, "coord")
show_data_info(dcf, "dcf")

img_grid = sp.nufft_adjoint(ksp * dcf, coord)
pl.ImagePlot(img_grid, z=0, title='Multi-channel Gridding')

#%% md

## Estimate sensitivity maps using JSENSE

# Here we use [JSENSE](https://onlinelibrary.wiley.com/doi/full/10.1002/mrm.21245) to estimate sensitivity maps.

#%%

mps = mr.app.JsenseRecon(ksp, coord=coord, device=device).run()

#%% md

## CG
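
# The CG step itself is not shown in this excerpt. A minimal sketch, mirroring the SENSE reconstruction in code example #10 (whose default solver is conjugate gradient) and assuming a regularization weight lamda and iteration count max_iter in addition to ksp, mps, coord and device from the cells above:

#%%

cg_app = mr.app.SenseRecon(ksp, mps, coord=coord, device=device, lamda=lamda,
                           max_iter=max_iter, save_objective_values=True)
cg_img = cg_app.run()
pl.ImagePlot(cg_img, title='CG SENSE Reconstruction')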
Code example #7
## FISTA

#%%

import sigpy.mri as mr
import sigpy.plot as pl

# ksp, mps, coord, device, lamda and max_iter are assumed to be defined earlier.
fista_app = mr.app.L1WaveletRecon(ksp,
                                  mps,
                                  lamda=lamda,
                                  coord=coord,
                                  device=device,
                                  max_iter=max_iter,
                                  save_objective_values=True)

fista_img = fista_app.run()

pl.ImagePlot(fista_img)

#%% md

## ADMM

#%%
'''
admm_app = mr.app.L1WaveletRecon(
        ksp, mps, solver='ADMM', lamda=lamda, coord=coord, device=device,
        max_iter=max_iter // max_cg_iter, max_cg_iter=max_cg_iter, save_objective_values=True)
admm_img = admm_app.run()

pl.ImagePlot(admm_img)
'''
Code example #8
                # Tail of a TensorFlow 1.x training loop inside forward_model();
                # sess, saver, DECONV, X, Y, tic and minibatch_loss are defined earlier,
                # and time, numpy (np) and sigpy.plot (pl) are assumed to be imported.
                if print_cost:
                    toc = time.time()
                    print('EPOCH = ', epoch, 'COST = ', minibatch_loss,
                          'Elapsed time = ', (toc - tic))

                if epoch % 200 == 0:
                    save_path = saver.save(sess,
                                           "model/model_maniflod_spiral.ckpt")
                    print("Model saved in file: %s" % save_path)
            Y_opt = np.array(
                sess.run(DECONV, feed_dict={
                    X: X_train,
                    Y: Y_train
                }))
            #Y_opt = Y_opt.eval(session = sess)
            sess.close()
    return Y_opt


Y_test = forward_model(
    X_train,
    Y_train,
    learning_rate=0.00001,
    num_epochs=1000,
    minibatch_size=2,  # should be smaller than the number of input examples
    print_cost=True)

pl.ImagePlot(Y_test)
np.save('data_spiral.npy', Y_test)
# Y_test = manifold_net(X_train).eval()
Code example #9
import numpy as np
import matplotlib.pyplot as plt
import sigpy as sp
import sigpy.plot as pl

# The sampling mask P, Fourier operator F, coil operator S, k-space data ksp
# and img_shape are assumed to be defined earlier.
W = sp.linop.Wavelet(img_shape)
wav = W * S.H * F.H * ksp
#pl.ImagePlot(wav**0.1, title=r'$W S^H F^H y$')

print(np.amax(np.abs(wav)))
print(np.amin(np.abs(wav)))
print(np.shape(wav))

plt.figure(1)
lala = ksp[0, :, :, 160]
print(np.shape(lala))
plt.imshow(np.abs(wav[:, :, 160]))
plt.clim(0.0001, 0.001)
plt.show()

pl.ImagePlot(wav, title=r'$W S^H F^H y$')

A = P * F * S * W.H

## Prox
print("Define Prox")

lamda = 0.005
proxg = sp.prox.L1Reg(wav.shape, lamda)
alpha = 1
wav_thresh = proxg(alpha, wav)

pl.ImagePlot(wav_thresh**0.1)

## Alg
print("Define Alg")
Code example #10
File: ReconTest.py Project: pengdada/kspace_precond
#coord = np.load(dir+'coord.npy').transpose((1,0,2))

import numpy as np
import sigpy as sp
import sigpy.mri as mr
import sigpy.plot as pl

# dir (the data directory), device, lamda, max_iter and max_cg_iter are assumed to be
# defined earlier, as are estimate_shape() and show_data_info().
ksp = np.load(dir + 'ksp.npy')
coord = np.load(dir + 'coord.npy')

ksp = ksp.transpose((2, 1, 0))
coord = coord.transpose((1, 0, 2)) * 96  # scale the trajectory, presumably to a 96-point matrix

print("estimate shape=", estimate_shape(coord))

#dcf = (coord[..., 0]**2 + coord[..., 1]**2+ coord[..., 2]**2)**0.5

show_data_info(ksp, "ksp")
show_data_info(coord, "coord")
#show_data_info(dcf, "dcf")

#ksp = np.stack((ksp.real, ksp.imag), axis=-1)
#ksp = np.stack((ksp.real, ksp.imag), axis=-1).astype(np.double)

mps = mr.app.JsenseRecon(ksp, coord=coord, device=device).run()
#mps = mr.app.JsenseRecon(ksp, device=device).run()

cg_app = mr.app.SenseRecon(
    ksp, mps, coord=coord, device=device, lamda=lamda,
    max_iter=max_iter, save_objective_values=True)
cg_img = cg_app.run()

np.save("cg_img.npy", cg_img)

pl.ImagePlot(cg_img)
#%%

mps = mr.app.JsenseRecon(ksp, coord=coord, device=device).run()

#%% md

## ADMM

#%%

admm_app = mr.app.TotalVariationRecon(
        ksp, mps, lamda=lamda, coord=coord, max_iter=max_iter // max_cg_iter,
        solver='ADMM', max_cg_iter=max_cg_iter, device=device, save_objective_values=True)
admm_img = admm_app.run()

pl.ImagePlot(admm_img)

#%% md

## ADMM with circulant preconditioner

#%%

rho = 1
circ_precond = mr.circulant_precond(mps, coord=coord, device=device, lamda=rho)

img_shape = mps.shape[1:]
G = sp.linop.FiniteDifference(img_shape)
g = G.H * G * sp.dirac(img_shape)
g = sp.fft(g)
g = sp.to_device(g, device=device)
Code example #12
    # Tail of the cartisian2D() helper, which builds a Cartesian trajectory mesh.
    # numpy (np), PIL.Image, sigpy (sp) and sigpy.plot (imported here as plt) are assumed.
    mesh[:, :, 1] = m2
    return mesh.astype(np.float64)  # np.float is removed in recent NumPy versions


name = 'img.jpg'
image = Image.open(name).convert('L')

arr = np.array(image).astype(np.complex64)  # cast to complex for the (NU)FFT
traj = cartisian2D(arr.shape, [1, 1], 1)
plt.ScatterPlot(traj, title='Trajectory')
image.close()
arr = arr / np.abs(arr).max()  # normalize by the peak magnitude

print(traj.shape)

kspaceNUFFT = sp.nufft(arr, traj)

plt.ImagePlot(np.log(kspaceNUFFT), title='k-space data from NUFFT')

kspaceFFT = sp.fft(arr)

plt.ImagePlot(np.log(kspaceFFT), title='k-space data from FFT')
print(kspaceFFT.shape)
print(kspaceNUFFT.shape)
sumNUFFT = np.sum(kspaceNUFFT)
sumFFT = np.sum(kspaceFFT)
if (np.allclose(kspaceNUFFT, kspaceFFT, rtol=10, atol=10)
        and np.isclose(sumNUFFT, sumFFT, rtol=50, atol=50)):
    print('Outputs are similar!')
else:
    print('Outputs are NOT similar!')
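
A relative error is often more informative than allclose with such loose tolerances; a small follow-up sketch, assuming the two arrays share a shape as the prints above indicate (FFT and NUFFT may still differ in scaling and phase conventions, so some mismatch is expected):

rel_err = np.linalg.norm(kspaceNUFFT - kspaceFFT) / np.linalg.norm(kspaceFFT)
print(f'Relative L2 error between NUFFT and FFT k-space: {rel_err:.3e}')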