Example #1
import os
import tempfile as tmp

import cfl  # BART's Python cfl I/O helpers


def bart(nargout, cmd, *args):

    if type(nargout) != int or nargout < 0:
        print("Usage: bart(<nargout>, <command>, <arguments...>)")
        return None

    bart_path = os.environ.get('TOOLBOX_PATH')

    if not bart_path:
        if os.path.isfile('/usr/local/bin/bart'):
            bart_path = '/usr/local/bin'
        elif os.path.isfile('/usr/bin/bart'):
            bart_path = '/usr/bin'
        else:
            raise Exception('Environment variable TOOLBOX_PATH is not set and no bart executable was found.')

    # Unique temporary-file prefix for the intermediate .cfl/.hdr pairs.
    name = tmp.NamedTemporaryFile().name

    nargin = len(args)
    infiles = [name + 'in' + str(idx) for idx in range(nargin)]
    in_str = ' '.join(infiles)

    for idx in range(nargin):
        cfl.writecfl(infiles[idx], args[idx])

    outfiles = [name + 'out' + str(idx) for idx in range(nargout)]
    out_str = ' '.join(outfiles)

    #TODO: Windows option.
    ERR = os.system(bart_path + '/bart ' + cmd + ' ' + in_str + ' ' + out_str)

    for elm in infiles:
        if os.path.isfile(elm + '.cfl'):
            os.remove(elm + '.cfl')
        if os.path.isfile(elm + '.hdr'):
            os.remove(elm + '.hdr')

    output = []
    for idx in range(nargout):
        elm = outfiles[idx]
        if not ERR:
            output.append(cfl.readcfl(elm))
        if os.path.isfile(elm + '.cfl'):
            os.remove(elm + '.cfl')
        if os.path.isfile(elm + '.hdr'):
            os.remove(elm + '.hdr')

    if ERR:
        raise Exception("Command exited with an error.")

    if nargout == 1:
        output = output[0]

    return output
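
A minimal usage sketch for the wrapper above (assumes a working BART install with TOOLBOX_PATH set; 'phantom' and 'fft' are standard BART tools):

ph = bart(1, 'phantom -x 64')      # 64x64 Shepp-Logan phantom
ksp = bart(1, 'fft -u 3', ph)      # unitary FFT over dims 0 and 1 (bitmask 3)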
Example #2
import os
import tempfile as tmp

import cfl


def bart(cmd='', nargout=0, *args):

    if not cmd or not nargout:
        print("Usage: bart <command> <arguments...>")
        return None

    bart_path = os.environ.get('TOOLBOX_PATH')

    if not bart_path:
        if os.path.isfile('/usr/local/bin/bart'):
            bart_path = '/usr/local/bin'
        elif os.path.isfile('/usr/bin/bart'):
            bart_path = '/usr/bin'
        else:
            raise Exception('Environment variable TOOLBOX_PATH is not set and no bart executable was found.')

    name = tmp.NamedTemporaryFile().name

    nargin = len(args)
    infiles = [name + 'in' + str(idx) for idx in range(nargin)]
    in_str = ' '.join(infiles)

    for idx in range(nargin):
        cfl.writecfl(infiles[idx], args[idx])

    outfiles = [name + 'out' + str(idx) for idx in range(nargout)]
    out_str = ' '.join(outfiles)

    #TODO: Windows option.
    ERR = os.system(bart_path + '/bart ' + cmd + ' ' + in_str + ' ' + out_str)

    for elm in infiles:
        if os.path.isfile(elm + '.cfl'):
            os.remove(elm + '.cfl')
        if os.path.isfile(elm + '.hdr'):
            os.remove(elm + '.hdr')

    output = []
    for idx in range(nargout):
        elm = outfiles[idx]
        if not ERR:
            output.append(cfl.readcfl(elm))
        if os.path.isfile(elm + '.cfl'):
            os.remove(elm + '.cfl')
        if os.path.isfile(elm + '.hdr'):
            os.remove(elm + '.hdr')

    if ERR:
        raise Exception("Command exited with an error.")

    if nargout == 1:
        output = output[0]

    return output
Example #3
    def __init__(self, infile, para, TI, outfile):
        # Use the constructor arguments rather than re-reading sys.argv.
        self.infile = infile
        self.para = para
        self.TIfile = TI
        self.outfile = outfile

        # dim = [x, y, time, slice]
        self.oridata = np.array(cfl.readcfl(self.infile).squeeze())
        self.TI = np.array(cfl.readcfl(self.TIfile).squeeze())

        # Normalize by the mean intensity of the last frame.
        a = np.mean(self.oridata[:, :, -1])
        self.oridata = 1.0 * self.oridata / a

        start = time.time()  # requires: import time
        self.map = self.getmap(np.abs(self.oridata), self.TI)
        end = time.time()

        cfl.writecfl(self.outfile, self.map)

        print("Elapsed time: " + str(end - start) + " s")
Example #4
# Assumes: numpy as np, cfl, and the project's KspaceDataSet are importable.
base_dir = '/home/ohadsh/work/data/SchizReg/24_05_2016/'
file_names = ['k_space_real_gt', 'k_space_imag_gt']
mini_batch = 50
tt = 'train'
data_set = KspaceDataSet(base_dir, file_names, stack_size=50, shuffle=False)
data_set_tt = getattr(data_set, tt)

data = data_set_tt.next_batch(mini_batch, norm=False)

k_space_real_gt = data["k_space_real_gt"][0, :, :]
k_space_imag_gt = data["k_space_imag_gt"][0, :, :]
# Assemble a complex64 array from the real and imaginary channels.
k_space = (np.zeros_like(k_space_real_gt) + 0j).astype('complex64')
k_space.real = k_space_real_gt
k_space.imag = k_space_imag_gt
cfl.writecfl('example', k_space)
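
For reference, the real/imag assembly above can be written as a single equivalent expression:

k_space = (k_space_real_gt + 1j * k_space_imag_gt).astype('complex64')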
# A = cfl.readcfl('xyz');


# for i in range(0,100):

#     data = data_set_tt.next_batch(mini_batch, norm=False)
#     k_space_real_gt = data["k_space_real_gt"][i,:,:]
#     k_space_imag_gt = data["k_space_imag_gt"][i,:,:]
#     org_image = get_image_from_kspace(k_space_real_gt, k_space_imag_gt)

#     # Generating random mask
#     mask = get_random_gaussian_mask(im_shape=(256, 256), peak_probability=0.7, std=45.0, keep_center=0.05)
    
    
#     reduction = np.sum(mask) / float(mask.ravel().shape[0])
Example #5
# Assumes: os, glob, h5py, numpy as np, cfl, and process_slice are in scope.
def main(args):

    # create data directories if they don't already exist
    for subdir in ('', 'train', 'validate', 'test'):
        os.makedirs(os.path.join(args.output_path, subdir), exist_ok=True)

    # determine splits manually for now...
    train_exams = [
        'Exam2323', 'Exam3330', 'Exam3331', 'Exam3332', 'Exam3410', 'Exam3411',
        'Exam3412', 'Exam4873', 'Exam4874', 'Exam4905', 'Exam5003', 'Exam2406'
    ]
    val_exams = ['Exam2200', 'Exam5050']
    test_exams = []
    all_exams = train_exams + val_exams + test_exams

    # figure out data splits
    num_train = len(train_exams)
    num_validate = len(val_exams)
    num_test = len(test_exams)
    num_cases = num_train + num_validate + num_test

    # how many cardiac phases to simulate
    num_phases_list = [20]

    for exam_name in all_exams:
        exam_path = os.path.join(args.input_path, exam_name)
        series_list = os.listdir(exam_path)

        if args.verbose:
            print("Processing %s..." % exam_name)

        for num_phases in num_phases_list:

            for series_name in series_list:
                series_path = os.path.join(exam_path, series_name,
                                           'Phases%d' % num_phases)

                num_slices = len(glob.glob('%s/ks_*.cfl' % series_path))
                kspace = [None] * num_slices
                maps = [None] * num_slices
                target = [None] * num_slices

                if args.verbose:
                    print("  %s (%d slices)..." % (series_name, num_slices))

                for sl in range(num_slices):
                    # loading k-space data
                    file_ks = "ks_%02d" % sl
                    ks_slice = cfl.readcfl(os.path.join(series_path, file_ks))
                    ks_slice = np.transpose(np.squeeze(ks_slice), [0, 1, 3, 2])

                    # process slice to get images and sensitivity maps
                    kspace[sl], maps[sl], target[sl] = process_slice(
                        ks_slice, args)

                # Stack volume
                kspace = np.stack(kspace, axis=0)
                maps = np.stack(maps, axis=0)
                target = np.stack(target, axis=0)

                # Determine path to new hdf5 file
                if exam_name in train_exams:
                    folder = 'train'
                elif exam_name in val_exams:
                    folder = 'validate'
                else:
                    folder = 'test'

                # write out HDF5 file for entire volume
                h5_name = "%s_%s_Phases%02d.h5" % (exam_name, series_name,
                                                   num_phases)
                filename = os.path.join(args.output_path, folder, h5_name)
                with h5py.File(filename, 'w') as hf:
                    hf.create_dataset('kspace', data=kspace)
                    hf.create_dataset('maps', data=maps)
                    hf.create_dataset('target', data=target)

                if args.dbwrite:
                    print('Writing out files to home folder!')
                    cfl.writecfl('/home/sandino/kspace', kspace)
                    cfl.writecfl('/home/sandino/maps', maps)
                    cfl.writecfl('/home/sandino/images', target)

    return
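
The volumes written above can be read back with h5py; a sketch (the exact file name depends on the exam and series, so the path here is hypothetical):

import h5py
with h5py.File('train/Exam2323_Series1_Phases20.h5', 'r') as hf:
    kspace, maps, target = hf['kspace'][()], hf['maps'][()], hf['target'][()]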
Example #6
#!/usr/bin/env python3
import os
import sys

import numpy as np

import cfl


#print(str(sys.argv))

pat = np.zeros((1,192,192))

#generate CAIPIRINHA pattern:


ACS = int(sys.argv[1])
US  = int(sys.argv[2])
if len(sys.argv) > 3:
    CAIPI_SHIFT = int(sys.argv[3])
else:
    CAIPI_SHIFT = US//2

for i in range(0, 192, US):
    # shift the sampled column by CAIPI_SHIFT from one sampled row to the next
    f = ((i // US % US) * CAIPI_SHIFT) % US

    pat[0, i, f::US] = 1

# fully sample the central ACS (calibration) region
pat[0, 192//2 - ACS//2:192//2 + ACS//2, 192//2 - ACS//2:192//2 + ACS//2] = 1

out = 'data/pat_' + str(US)
os.makedirs('data', exist_ok=True)  # make sure the output directory exists
cfl.writecfl(out, pat.transpose((0, 2, 1)))
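
A quick sanity check of the pattern just written (a sketch, run in the same session):

pat_chk = cfl.readcfl(out)
print("sampled fraction: %.3f" % (np.count_nonzero(pat_chk) / pat_chk.size))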
os.environ["TOOLBOX_PATH"] = toolbox_path
sys.path.append(os.path.join(toolbox_path, 'python'))
import bart, cfl

# blocking params
block_size = 16
block_stride = 16

# test dataset
filename = '/data/sandino/Cine/validate/Exam2200_Series5_Phases20.h5'

sl = 0  # pick slice ('slice' would shadow the Python builtin)
with h5py.File(filename, 'r') as data:
    orig_images = data['target'][sl]

# Convert numpy array to tensor (cplx and T are the project's complex-tensor
# and transform helper modules)
images = cplx.to_tensor(orig_images).unsqueeze(0)
_, nx, ny, nt, nmaps, _ = images.shape

# Initialize blocking operator
block_op = T.ArrayToBlocks(block_size, images.shape, overlapping=True)

blocks = block_op(images)
images = block_op(blocks, adjoint=True)
images = images.squeeze(0)

# Write out images
images = cplx.to_numpy(images)
cfl.writecfl('block_input', orig_images)
cfl.writecfl('block_output', images)
cfl.writecfl('block_error', orig_images - images)
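
To quantify the round trip through the blocking operator, a one-line check (a sketch; numpy assumed as np):

import numpy as np
print("max |error|:", np.abs(orig_images - images).max())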
Example #8
def main(out_name, in_name):
    # cfl.writecfl takes the output name first, then the array.
    data = cfl.readcfl(in_name)
    cfl.writecfl(out_name, data)
    return 0
Example #9
#!/usr/bin/env python3

import numpy as np
import cfl
import argparse

parser = argparse.ArgumentParser(description='Undersample cfl')
parser.add_argument('dim',
                    metavar='D',
                    type=int,
                    action='store',
                    help='Dimension')
parser.add_argument('acc', metavar='acc', type=int, help='Acceleration factor')
parser.add_argument('input', metavar='IN', type=str, help='Input cfl')
parser.add_argument('output', metavar='OUT', type=str, help='Output cfl')

args = parser.parse_args()

in_ = cfl.readcfl(args.input)

# Move the target dim to the front, keep every acc-th sample, then move it back.
tmp = np.moveaxis(in_, args.dim, 0)

out_ = np.moveaxis(tmp[::args.acc, ...], 0, args.dim)

cfl.writecfl(args.output, out_)
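
A standalone shape check of the moveaxis/stride pattern used above (toy dimensions):

import numpy as np
x = np.arange(24).reshape(2, 3, 4)
t = np.moveaxis(x, 1, 0)[::2, ...]        # keep every 2nd index along dim 1
assert np.moveaxis(t, 0, 1).shape == (2, 2, 4)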
Example #10
# Fragment: h5_dataset, h5_dataset_rawdata_name, trajectory, plt, np, bart,
# and cfl are set up earlier in the script.
rawdata = h5_dataset.get(h5_dataset_rawdata_name)[()]  # .value was removed in h5py 3.x

dummy, nFE, nSpokes, nCh = rawdata.shape

#%% Display rawdata and trajectory
plt.figure(1)
plt.imshow(np.log(1 + np.abs(rawdata[0, :, :, 0])), cmap="gray")
plt.axis('off')
plt.title('rawdata coil 1')

#%% Subsample
#R = 2
#trajectory = trajectory[:,:,1::R]
#rawdata = rawdata[:,:,1::R,:]
#[dummy,nFE,nSpokes,nCh] = rawdata.shape

#%%  Demo: NUFFT reconstruction with BART
# inverse gridding ('-i' = inverse NUFFT, '-t' = trajectory follows)
img_igrid = bart(1, 'nufft -i -t', trajectory, rawdata)
cfl.writecfl("heart_igrid", img_igrid)

# channel combination (bitmask 8 selects dim 3, BART's coil dimension)
img_igrid_sos = bart(1, 'rss 8', img_igrid)
img_igrid_sos = np.abs(img_igrid_sos)

#%% Display results
plt.figure(2)
plt.imshow(np.fliplr(np.flipud(img_igrid_sos)), cmap="gray")
plt.axis('off')
plt.title('Regridding SOS reconstruction')
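
For completeness, the coil-combined image can be stored as a cfl pair as well (a sketch):

cfl.writecfl("heart_igrid_sos", img_igrid_sos)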
Example #11
def main():
    # Assumes: os, glob, h5py, numpy as np, bart, cfl, and the project's
    # cplx/T helper modules are importable.
    # ARGS
    input_data_path = '/mnt/dense/data_public/fastMRI/multicoil_val'
    output_data_path = '/mnt/raid3/sandino/fastMRI/validate_full'
    center_fraction = 0.04  # number of k-space lines used to do ESPIRiT calib
    num_emaps = 1
    dbwrite = False

    input_files = glob.glob(os.path.join(input_data_path, '*.h5'))

    for file in input_files:
        # Load HDF5 file
        hf = h5py.File(file, 'r')
        # existing keys: ['ismrmrd_header', 'kspace', 'reconstruction_rss']

        # load k-space and image data from HDF5 file
        kspace_orig = hf['kspace'][()]
        im_rss = hf['reconstruction_rss'][()]  # (33, 320, 320)

        # get data dimensions
        num_slices, num_coils, num_kx, num_ky = kspace_orig.shape
        xres, yres = im_rss.shape[1:3]  # matrix size
        num_low_freqs = int(round(center_fraction * yres))

        # allocate memory for new arrays
        im_shape = (xres, yres)
        kspace = np.zeros((num_slices, xres, yres, num_coils),
                          dtype=np.complex64)
        maps = np.zeros((num_slices, xres, yres, num_coils, num_emaps),
                        dtype=np.complex64)
        im_truth = np.zeros((num_slices, xres, yres, num_emaps),
                            dtype=np.complex64)

        for sl in range(num_slices):
            kspace_slice = np.transpose(kspace_orig[sl], axes=[1, 2, 0])
            kspace_slice = kspace_slice[:, :, None, :]

            # Data dimensions for BART:
            #  kspace - (kx, ky, 1, coils)
            #  maps - (kx, ky, 1, coils, emaps)
            # Data dimensions for PyTorch:
            #  kspace - (1, kx, ky, coils, real/imag)
            #  maps   - (1, kx, ky, coils, emaps, real/imag)

            # Pre-process k-space data (PyTorch)
            kspace_tensor = cplx.to_tensor(
                np.transpose(kspace_slice, axes=[2, 0, 1,
                                                 3]))  # (1, 640, 372, 15, 2)
            image_tensor = T.ifft2(kspace_tensor)
            print(image_tensor.size())  # debug: pre-crop image size
            image_tensor = cplx.center_crop(image_tensor, im_shape)
            kspace_tensor = T.fft2(image_tensor)
            kspace_slice = np.transpose(cplx.to_numpy(kspace_tensor),
                                        axes=[1, 2, 0, 3])

            # Compute sensitivity maps (BART)
            maps_slice = bart.bart(
                1, f'ecalib -d 0 -m {num_emaps} -c 0.1 -r {num_low_freqs}',
                kspace_slice)
            maps_slice = np.reshape(maps_slice,
                                    (xres, yres, 1, num_coils, num_emaps))
            maps_tensor = cplx.to_tensor(
                np.transpose(maps_slice, axes=[2, 0, 1, 3, 4]))

            # Do coil combination using sensitivity maps (PyTorch)
            A = T.SenseModel(maps_tensor)
            im_tensor = A(kspace_tensor, adjoint=True)

            # Convert image tensor to numpy array
            im_slice = cplx.to_numpy(im_tensor)

            # Re-shape and save everything
            kspace[sl] = np.reshape(kspace_slice, (xres, yres, num_coils))
            maps[sl] = np.reshape(maps_slice,
                                  (xres, yres, num_coils, num_emaps))
            im_truth[sl] = np.reshape(im_slice, (xres, yres, num_emaps))

        # write out new hdf5
        file_new = os.path.join(output_data_path, os.path.split(file)[-1])
        with h5py.File(file_new, 'w') as hf_new:
            # create datasets within HDF5
            hf_new.create_dataset('kspace', data=kspace)
            hf_new.create_dataset('maps', data=maps)
            hf_new.create_dataset('reconstruction_espirit', data=im_truth)
            hf_new.create_dataset('reconstruction_rss',
                                  data=im_rss)  # provided by fastMRI
            hf_new.create_dataset('ismrmrd_header', data=hf['ismrmrd_header'])

            # create attributes (metadata)
            for key in hf.attrs.keys():
                hf_new.attrs[key] = hf.attrs[key]

        hf.close()  # done with the input file

        if dbwrite:
            hf_new = h5py.File(file_new, 'r')
            print('Keys:', list(hf_new.keys()))
            print('Attrs:', dict(hf_new.attrs))
            cfl.writecfl('/home/sandino/maps', hf_new['maps'][()])
            cfl.writecfl('/home/sandino/kspace', hf_new['kspace'][()])
            cfl.writecfl('/home/sandino/im_truth',
                         hf_new['reconstruction_rss'][()])
            cfl.writecfl('/home/sandino/im_recon',
                         hf_new['reconstruction_espirit'][()])
Example #12
# Fragment: images, block_op, blk_size, ne, nt, nb, and the cplx helpers come
# from earlier in the script.
# Extract spatial patches across images
patches = block_op(images)
num_patches = patches.shape[0]  # avoid shadowing the numpy alias 'np'

# Reshape into batch of 2D matrices
patches = patches.permute(0, 1, 2, 4, 3, 5)
patches = patches.reshape((num_patches, ne * blk_size**2, nt, 2))

# Perform SVD to get left and right singular vectors
U, S, V = cplx.svd(patches, compute_uv=True)

# Truncate singular values and corresponding singular vectors
U = U[:, :, :nb, :]  # [N, Px*Py*E, B, 2]
S = S[:, :nb]  # [N, B]
V = V[:, :, :nb, :]  # [N, T, B, 2]

# Combine and reshape matrices
S_sqrt = S.reshape((num_patches, 1, 1, 1, 1, nb, 1)).sqrt()
L = U.reshape((num_patches, blk_size, blk_size, 1, ne, nb, 2)) * S_sqrt
R = V.reshape((num_patches, 1, 1, nt, 1, nb, 2)) * S_sqrt
blocks = torch.sum(cplx.mul(L, cplx.conj(R)), dim=-2)

images = block_op(blocks, adjoint=True)

# Write out images
images = cplx.to_numpy(images.squeeze(0))
cfl.writecfl('svdinit_input', orig_images)
cfl.writecfl('svdinit_output', images)
cfl.writecfl('svdinit_error', orig_images - images)
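
The truncation above is the standard best rank-nb approximation; a plain-NumPy analogue (real-valued sketch):

import numpy as np
A = np.random.randn(8, 5)
U, S, Vt = np.linalg.svd(A, full_matrices=False)
nb = 2
A_lr = (U[:, :nb] * S[:nb]) @ Vt[:nb, :]   # best rank-nb approximation (Eckart-Young)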
Example #13
    osc_spell2 = osc_sin2

    # Trend
    trend = (np.exp(t / np.max(t) * 3))
    trend = trend / np.max(trend) * trend_amp
    trend -= np.max(trend) / 2

    osc_trend1 = osc_sin1
    osc_trend2 = osc_sin2

    # Add up
    a1 = (c + 1) / nChannels
    a2 = (nChannels - c) / nChannels

    k_sin[:, c] = a1 * osc_sin1 + a2 * osc_sin2
    k_noise[:, c] = (a1 * osc_noise1 + a2 * osc_noise2) + noise
    k_spell[:, c] = (a1 * osc_spell1 + a2 * osc_spell2) + spell
    k_trend[:, c] = (a1 * osc_trend1 + a2 * osc_trend2) + trend

writecfl("osc_sin1", osc_sin1)
writecfl("osc_sin2", osc_sin2)
writecfl("sin", np.zeros(shape=(nSamples)))  # for bash-script to run poperly
writecfl("noise", noise)
writecfl("trend", trend)
writecfl("spell", spell)

writecfl("k_sin", k_sin)
writecfl("k_noise", k_noise)
writecfl("k_spell", k_spell)
writecfl("k_trend", k_trend)
Example #14
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import h5py
import os
import os.path
import cfl

with os.scandir(os.path.curdir) as dirit:
    for f in dirit:
        prefix = 'rawdata_'
        if f.name.endswith(".h5"):
            #remove rawdata_ prefix and .h5 extension
            if f.name.startswith(prefix):
                fstrip = f.name[len(prefix):-3]
            else:
                fstrip = f.name[:-3]
            with h5py.File(f, 'r') as h5_dataset:
                outdir = fstrip + "_cfl"
                os.makedirs(outdir, exist_ok=True)
                for key in list(h5_dataset.keys()):
                    keydata = h5_dataset.get(key)[()]
                    cfl.writecfl(os.path.join(outdir, key), keydata)
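
Each HDF5 dataset ends up as a matching name.cfl/name.hdr pair; reading one back is a one-liner (the key 'kspace' below is hypothetical):

arr = cfl.readcfl(os.path.join(outdir, 'kspace'))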
Example #15
def main(out_name, in_name):
    # As in Example #8: cfl.writecfl takes the output name first, then the array.
    data = cfl.readcfl(in_name)
    cfl.writecfl(out_name, data)
    return 0
Example #16
# Fragment: torch, cfl, the cplx helpers, glr_compress/llr_compress, and
# num_basis/blk_size/overlapping are defined earlier in the script.
# Convert numpy array to tensor
images = cplx.to_tensor(orig_images).unsqueeze(0)
_, nx, ny, nt, ne, _ = images.shape

# Initialize lists
glr_images = [None] * len(num_basis)
glr_error = [None] * len(num_basis)
llr_images = [None] * len(num_basis)
llr_error = [None] * len(num_basis)

for i in range(len(num_basis)):
    # Use globally low-rank model to compress images
    glr_images[i] = glr_compress(images, num_basis[i])
    glr_error[i] = images - glr_images[i]

    # Use locally low-rank model to compress images
    llr_images[i] = llr_compress(images, num_basis[i], blk_size, overlapping)
    llr_error[i] = images - llr_images[i]

glr_images = torch.cat(glr_images, dim=2).squeeze(0)
glr_error = torch.cat(glr_error, dim=2).squeeze(0)
llr_images = torch.cat(llr_images, dim=2).squeeze(0)
llr_error = torch.cat(llr_error, dim=2).squeeze(0)

# Write out images
cfl.writecfl('svd_glr_images', cplx.to_numpy(glr_images).swapaxes(0, 1))
cfl.writecfl('svd_glr_error', cplx.to_numpy(glr_error).swapaxes(0, 1))
cfl.writecfl('svd_llr_images', cplx.to_numpy(llr_images).swapaxes(0, 1))
cfl.writecfl('svd_llr_error', cplx.to_numpy(llr_error).swapaxes(0, 1))