Example #1
    def get(self,varnames):
        '''Get variables from the MATLAB workspace into Python as numpy arrays.

        varnames -- List of names of variables in MATLAB workspace to get.

        Notice that varnames should be a list of strings.
        '''

        if not isinstance(varnames, list):
            try:
                varnames = list(varnames)
            except TypeError:
                raise ValueError('varnames should be a list of variable names!')

        tmp_filename = NamedTemporaryFile(suffix='.mat').name
        cmd = "save('%s',%s)" % (tmp_filename,','.join([ "'%s'" % vname for vname in varnames ]))
        # logging.info(cmd)
        self.run(cmd)

        try:
            data = load_mat(tmp_filename)
        except Exception:
            raise ValueError('Was not able to read MATLAB workspace variables.')

        vals = { key: data[key] for key in varnames }
        return(vals)
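The method above works by asking MATLAB to save() the requested variables into a temporary .mat file and then reading that file back on the Python side. A minimal sketch of the same round trip using scipy.io directly (an assumption: load_mat is treated here as behaving like scipy.io.loadmat):

from tempfile import NamedTemporaryFile
from scipy.io import savemat, loadmat
import numpy as np

# Stand-in for MATLAB's save('tmp.mat', 'x', 'y'): write two variables to a .mat file
tmp_filename = NamedTemporaryFile(suffix='.mat').name
savemat(tmp_filename, {'x': np.eye(4), 'y': np.random.rand(3, 3)})

# Read them back the way get() does, keeping only the requested names
data = loadmat(tmp_filename)
vals = {key: data[key] for key in ['x', 'y']}
print(vals['x'].shape)  # (4, 4)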
Example #2
def mat_keys(filename, ignore_dbl_underscored=True, no_print=False):
    '''Give the keys found in a .mat file.

    Parameters
    ----------
    filename : str
        .mat filename.
    ignore_dbl_underscored : bool, optional
        Remove keys beginning with two underscores.
    no_print : bool, optional
        Don't print out the keys.

    Returns
    -------
    keys : list
        Keys present in the dictionary of the read-in .mat file.
    '''

    data = load_mat(filename)
    keys = list(data.keys())

    if ignore_dbl_underscored:
        keys = [x for x in keys if not x.startswith('__')]

    if not no_print:
        print('Keys: ', keys)

    return keys
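For context, scipy-style .mat loaders always add bookkeeping entries such as __header__, __version__ and __globals__, which is what the double-underscore filter above strips out. A small self-contained check, using scipy.io directly as a stand-in for load_mat:

from tempfile import NamedTemporaryFile
from scipy.io import savemat, loadmat
import numpy as np

# Write a throwaway .mat file with two variables
fname = NamedTemporaryFile(suffix='.mat').name
savemat(fname, {'kSpace': np.zeros((4, 4)), 'mask': np.ones(4)})

# Same filtering as mat_keys(..., ignore_dbl_underscored=True)
keys = [k for k in loadmat(fname) if not k.startswith('__')]
print('Keys: ', sorted(keys))  # Keys:  ['kSpace', 'mask']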
Example #3
 def test_gx_gy_data():
     path = str(
         Path('mr_utils/test_data/tests/gridding/scgrog/test_gx_gy_data.mat'
              ).resolve())
     data = load_mat(path)
     Gxm = data['Gx']
     Gym = data['Gy']
     return (Gxm, Gym)
Example #4
 def gx_gy_results():
     path = str(
         Path('mr_utils/test_data/tests/gridding/scgrog/gx_gy_results.mat').
         resolve())
     data = load_mat(path)
     Gxm = data['officialGx']
     Gym = data['officialGy']
     return (Gxm, Gym)
Example #5
 def TV_re_order():
     path = str(
         Path('mr_utils/test_data/tests/recon/reordering/TV_re_order.mat').
         resolve())
     data = load_mat(path)
     a = data['TV_term_reorder_update_real']
     b = data['TV_term_reorder_update_imag']
     return (a, b)
Example #6
 def grog_result():
     path = str(
         Path('mr_utils/test_data/tests/gridding/scgrog/grog_result.mat').
         resolve())
     data = load_mat(path)
     kspacem = data['officialCartesianKSpace']
     maskm = data['officialKMask']
     return (kspacem, maskm)
Example #7
 def test_grog_data_4D():
     path = str(
         Path(
             'mr_utils/test_data/tests/gridding/scgrog/test_grog_data_4D.mat'
         ).resolve())
     data = load_mat(path)
     traj = data['testTrajectory3D']
     kspace = data['testData4D']
     return (kspace, traj)
Example #8
 def test_gridder_data_4D():
     path = str(
         Path(
             'mr_utils/test_data/tests/gridding/scgrog/test_gridder_data_4D.mat'
         ).resolve())
     data = load_mat(path, 'KSpaceData')
     kspace = data['kSpace'][0][0]
     traj = data['trajectory'][0][0]
     cartdims = tuple(list(data['cartesianSize'][0][0][0]))
     return (kspace, traj, cartdims)
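The repeated [0][0] indexing above is how MATLAB structs come back from scipy-style loaders: every struct field is wrapped in a 1x1 object array. A minimal illustration, assuming load_mat(path, key) behaves like scipy.io.loadmat(path)[key]:

from tempfile import NamedTemporaryFile
from scipy.io import savemat, loadmat
import numpy as np

# A nested dict is saved as a MATLAB struct
fname = NamedTemporaryFile(suffix='.mat').name
savemat(fname, {'KSpaceData': {'kSpace': np.zeros((8, 8)),
                               'cartesianSize': np.array([[8, 8, 4]])}})

data = loadmat(fname)['KSpaceData']
kspace = data['kSpace'][0][0]                     # unwrap the 1x1 struct field
cartdims = tuple(data['cartesianSize'][0][0][0])  # unwrap the 1x3 size vector
print(kspace.shape, cartdims)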
Example #9
def client_get(varnames,host=None,port=None,bufsize=None):
    '''Get variables from a remote MATLAB workspace into Python as numpy arrays.

    varnames -- List of names of variables in MATLAB workspace to get.
    host -- host/ip-address of server running MATLAB.
    port -- port of host to connect to.
    bufsize -- Number of bytes to transmit/receive at a time.

    Notice that varnames should be a list of strings.
    '''

    if not isinstance(varnames, list):
        try:
            varnames = list(varnames)
        except TypeError:
            raise ValueError('varnames should be a list of variable names!')

    sock,host,port,bufsize = get_socket(host,port,bufsize)

    # Connect to server and send data
    try:
        sock.connect((host,port))
        sock.sendall(('%s\n' % GET).encode()) # tell host what we want to do
        sock.sendall(('%d\n' % bufsize).encode()) # tell host bufsize

        # make varnames a space separated list, then send
        sock.sendall((' '.join(varnames) + '\n').encode())

        # Get ready to receive the file
        tmp_filename = NamedTemporaryFile().name
        with open(tmp_filename,'wb') as f:
            done = False
            while not done:
                received = sock.recv(bufsize)
                if bytes(done_token,'utf-8') in received:
                    received = received[:-len(done_token)]
                    done = True
                f.write(received)

        # Now load the transferred MAT file into memory
        try:
            data = load_mat(tmp_filename)
            vals = { key: data[key] for key in varnames }
        except Exception:
            raise ValueError('Was not able to read MATLAB workspace variables.')

    finally:
        sock.close()

    logging.info('Received variables!')
    return(vals)
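The receive loop above frames the transfer with a terminating token rather than a length prefix: it keeps reading until done_token appears in the stream and then strips it off. A self-contained sketch of that framing over a local socket pair (the token value 'DONE' is a placeholder; the example does not show what done_token actually is):

import socket

done_token = 'DONE'   # placeholder value; the real token is defined elsewhere
bufsize = 16

# Local socket pair stands in for the client/server connection
sender, receiver = socket.socketpair()
sender.sendall(b'pretend .mat file bytes' + done_token.encode())
sender.close()

chunks, done = [], False
while not done:
    received = receiver.recv(bufsize)
    if bytes(done_token, 'utf-8') in received:
        received = received[:-len(done_token)]
        done = True
    chunks.append(received)
receiver.close()

print(b''.join(chunks))   # b'pretend .mat file bytes'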
Example #10
    def true_orderings():
        path = str(
            Path('mr_utils/test_data/tests/recon/reordering/true_orderings.mat'
                 ).resolve())

        # offset by 1 since MATLAB uses 1-based indexing
        orderings = load_mat(path)
        sort_order_real_x = orderings['sort_order_real_x'] - 1
        sort_order_imag_x = orderings['sort_order_imag_x'] - 1
        sort_order_real_y = orderings['sort_order_real_y'] - 1
        sort_order_imag_y = orderings['sort_order_imag_y'] - 1

        return (sort_order_real_x, sort_order_imag_x, sort_order_real_y,
                sort_order_imag_y)
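The -1 above converts MATLAB's 1-based sort indices into 0-based numpy indices. A small worked check with a made-up array:

import numpy as np

x = np.array([0.3, 0.1, 0.2])
# In MATLAB, [~, order] = sort(x) returns order = [2 3 1]  (1-based)
order_matlab = np.array([2, 3, 1])
assert np.array_equal(order_matlab - 1, np.argsort(x))  # [1, 2, 0]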
Example #11
def mat_keys(filename, ignore_dbl_underscored=True, no_print=False):
    '''Give the keys found in a .mat file.

    filename -- .mat filename.
    ignore_dbl_underscored -- Remove keys beginning with two underscores.
    no_print -- Don't print out the keys.
    '''

    data = load_mat(filename)
    keys = list(data.keys())

    if ignore_dbl_underscored:
        keys = [x for x in keys if not x.startswith('__')]

    if not no_print:
        print('Keys: ', keys)

    return (keys)
Example #12
def view(image,
         load_opts=None,
         is_raw=None,
         is_line=None,
         prep=None,
         fft=False,
         fft_axes=None,
         fftshift=None,
         avg_axis=None,
         coil_combine_axis=None,
         coil_combine_method='walsh',
         coil_combine_opts=None,
         is_imspace=False,
         mag=None,
         phase=False,
         log=False,
         imshow_opts={'cmap': 'gray'},
         montage_axis=None,
         montage_opts={'padding_width': 2},
         movie_axis=None,
         movie_interval=50,
         movie_repeat=True,
         save_npy=False,
         debug_level=logging.DEBUG,
         test_run=False):
    '''Image viewer to quickly inspect data.

    Parameters
    ----------
    image : str or array_like
        Name of the file including the file extension or numpy array.
    load_opts : dict, optional
        Options to pass to data loader.
    is_raw : bool, optional
        Inform if data is raw. Will attempt to guess from extension.
    is_line : bool, optional
        Whether or not this is a line plot (as opposed to image).
    prep : callable, optional
        Lambda function to process the data before it's displayed.
    fft : bool, optional
        Whether or not to perform n-dimensional FFT of data.
    fft_axes : tuple, optional
        Axes to perform the FFT over; determines the dimension of the n-dim FFT.
    fftshift : bool, optional
        Whether or not to perform fftshift. Defaults to True if fft.
    avg_axis : int, optional
        Axis (or axes) to take the average over.
    coil_combine_axis : int, optional
        Which axis to perform coil combination over.
    coil_combine_method : {'walsh', 'inati', 'pca'}, optional
        Method to use to combine coils.
    coil_combine_opts : dict, optional
        Options to pass to the coil combine method.
    is_imspace : bool, optional
        Whether or not the data is in image space. For coil combine.
    mag : bool, optional
        View magnitude image. Defaults to True if data is complex.
    phase : bool, optional
        View phase image.
    log : bool, optional
        View log of magnitude data. Defaults to False.
    imshow_opts : dict, optional
        Options to pass to imshow. Defaults to {'cmap': 'gray'}.
    montage_axis : int, optional
        Which axis indexes the images to be shown in the montage.
    montage_opts : dict, optional
        Additional options to pass to skimage.util.montage.
    movie_axis : int, optional
        Which axis indexes the frames of the movie.
    movie_interval : int, optional
        Interval to give to animation frames.
    movie_repeat : bool, optional
        Whether or not to put movie on endless loop.
    save_npy : bool, optional
        Whether or not to save the output as npy file.
    debug_level : logging_level, optional
        Level of verbosity. See logging module.
    test_run : bool, optional
        Doesn't show figure, returns debug object. Mostly for testing.

    Returns
    -------
    data : array_like
        Image data shown in plot.
    dict, optional
        All local variables when test_run=True.

    Raises
    ------
    Exception
        When file type is not in ['dat', 'npy', 'mat', 'h5'].
    ValueError
        When coil combine requested, but fft_axes not set.
    AssertionError
        When Walsh coil combine is requested but len(fft_axes) != 2.
    ValueError
        When there are too many dimensions to display.
    '''

    # Set up logging...
    logging.basicConfig(format='%(levelname)s: %(message)s', level=debug_level)

    # Add some default empty params
    if load_opts is None:
        load_opts = dict()
    if coil_combine_opts is None:
        coil_combine_opts = dict()

    # If the user wants to look at a numpy array, recognize that the
    # image argument is the array itself:
    if isinstance(image, np.ndarray):
        logging.info('Image is a numpy array!')
        data = image
    elif isinstance(image, list):
        # If user sends a list, try casting to numpy array
        logging.info('Image is a list, trying to cast as numpy array...')
        data = np.array(image)
    else:
        # Find the file extension
        ext = pathlib.Path(image).suffix

        # If the user says data is raw, then trust the user
        if is_raw or (ext == '.dat'):
            data = load_raw(image, **load_opts)
        elif ext == '.npy':
            data = np.load(image, **load_opts)
        elif ext == '.mat':
            # Help out the user a little bit...  If only one
            # nontrivial key is found then go ahead and assume it's
            # that one
            data = None
            if not list(load_opts):
                keys = mat_keys(image, no_print=True)
                if len(keys) == 1:
                    logging.info(('No key supplied, but one key for'
                                  ' mat dictionary found (%s), using'
                                  ' it...'), keys[0])
                    data = load_mat(image, key=keys[0])

            # If we can't help the user out, just load it as normal
            if data is None:
                data = load_mat(image, **load_opts)
        elif ext == '.h5':
            data = load_ismrmrd(image, **load_opts)
        else:
            raise Exception('File type %s not understood!' % ext)

    # Right off the bat, remove singleton dimensions
    if 1 in data.shape:
        logging.info('Current shape %s: Removing singleton dimensions...',
                     str(data.shape))
        data = data.squeeze()
        logging.info('New shape: %s', str(data.shape))

    # Average out over any axis specified
    if avg_axis is not None:
        data = np.mean(data, axis=avg_axis)

    # Let's collapse the coil dimension using the specified algorithm
    if coil_combine_axis is not None:

        # We'll need to know the fft_axes if the data is in kspace
        if not is_imspace and fft_axes is None:
            msg = ('fft_axes required to do coil combination of '
                   'k-space data!')
            raise ValueError(msg)

        if coil_combine_method == 'walsh':
            msg = 'Walsh only works with 2D images!'
            assert len(fft_axes) == 2, msg
            logging.info('Performing Walsh 2d coil combine across axis %d...',
                         list(range(data.ndim))[coil_combine_axis])

            # We need to do this in the image domain...
            if not is_imspace:
                fft_data = np.fft.ifftshift(np.fft.ifftn(data, axes=fft_axes),
                                            axes=fft_axes)
            else:
                fft_data = data

            # walsh expects (coil,y,x)
            fft_data = np.moveaxis(fft_data, coil_combine_axis, 0)
            csm_walsh, _ = calculate_csm_walsh(fft_data, **coil_combine_opts)
            fft_data = np.sum(csm_walsh * np.conj(fft_data),
                              axis=0,
                              keepdims=True)

            # Sum kept the axis where the coil dimension used to be, so we
            # can rely on fft_axes being correct when we FT back to k-space
            fft_data = np.moveaxis(fft_data, 0, coil_combine_axis)

            # Now move back to kspace and squeeze the dangling axis
            if not is_imspace:
                data = np.fft.fftn(np.fft.fftshift(fft_data, axes=fft_axes),
                                   axes=fft_axes).squeeze()
            else:
                data = fft_data.squeeze()

        elif coil_combine_method == 'inati':

            logging.info('Performing Inati coil combine across axis %d...',
                         list(range(data.ndim))[coil_combine_axis])

            # Put things into image space if we need to
            if not is_imspace:
                fft_data = np.fft.ifftshift(np.fft.ifftn(data, axes=fft_axes),
                                            axes=fft_axes)
            else:
                fft_data = data

            # inati expects (coil,z,y,x)
            fft_data = np.moveaxis(fft_data, coil_combine_axis, 0)
            _, fft_data = calculate_csm_inati_iter(fft_data,
                                                   **coil_combine_opts)

            # calculate_csm_inati_iter got rid of the axis, so we
            # need to add it back in so we can use the same fft_axes
            fft_data = np.expand_dims(fft_data, coil_combine_axis)

            # Now move back to kspace and squeeze the dangling axis
            if not is_imspace:
                data = np.fft.fftn(np.fft.fftshift(fft_data, axes=fft_axes),
                                   axes=fft_axes).squeeze()
            else:
                data = fft_data.squeeze()

        elif coil_combine_method == 'pca':
            logging.info('Performing PCA coil combine across axis %d...',
                         list(range(data.ndim))[coil_combine_axis])

            # We don't actually care whether we do this in k-space or
            # image space
            if not is_imspace:
                logging.info(('PCA doesn\'t care that the image might not be '
                              'in image space.'))

            if 'n_components' not in coil_combine_opts:
                n_components = int(data.shape[coil_combine_axis] / 2)
                logging.info('Deciding to use %d components.', n_components)
                coil_combine_opts['n_components'] = n_components

            data = coil_pca(data,
                            coil_dim=coil_combine_axis,
                            **coil_combine_opts)

        else:
            logging.error('Coil combination method "%s" not supported!',
                          coil_combine_method)
            logging.warning('Attempting to skip coil combination!')

    # Show the image.  Let's also try to help the user out again.  If
    # we have 3 dimensions, one of them is probably a montage or a
    # movie.  If the user didn't tell us anything, it's going to
    # crash anyway, so let's try guessing what's going on...
    if (data.ndim > 2) and (movie_axis is None) and (montage_axis is None):
        logging.info('Data has %d dimensions!', data.ndim)

        # We will always assume that inplane resolution is larger
        # than the movie/montage dimensions

        # If only 3 dims, then one must be montage/movie dimension
        if data.ndim == 3:
            # assume inplane resolution larger than movie/montage dim
            min_axis = np.argmin(data.shape)

            # Assume 10 is the most we'll want to montage
            if data.shape[min_axis] < 10:
                logging.info('Guessing axis %d is montage...', min_axis)
                montage_axis = min_axis
            else:
                logging.info('Guessing axis %d is movie...', min_axis)
                movie_axis = min_axis

        # If there are 4 dims, guess the smaller one is the montage
        # dimension and the larger one the movie dimension
        elif data.ndim == 4:
            montage_axis = np.argmin(data.shape)

            # Consider the 4th dimension as the color channel in
            # skimontage
            montage_opts['multichannel'] = True

            # Montage will go through skimontage which will remove the
            # montage_axis dimension, so find the movie dimension
            #  without the montage dimension:
            tmp = np.delete(data.shape[:], montage_axis)
            movie_axis = np.argmin(tmp)

            logging.info(('Guessing axis %d is montage, axis %d will be '
                          'movie...'), montage_axis, movie_axis)

    # fft and fftshift will require fft_axes.  If the user didn't
    # give us axes, let's try to guess them:
    if (fft or (fftshift is not False)) and (fft_axes is None):
        all_axes = list(range(data.ndim))

        if (montage_axis is not None) and (movie_axis is not None):
            fft_axes = np.delete(
                all_axes, [all_axes[montage_axis], all_axes[movie_axis]])
        elif montage_axis is not None:
            fft_axes = np.delete(all_axes, all_axes[montage_axis])
        elif movie_axis is not None:
            fft_axes = np.delete(all_axes, all_axes[movie_axis])
        else:
            fft_axes = all_axes

        logging.info('User did not supply fft_axes, guessing %s...',
                     str(fft_axes))

    # Perform n-dim FFT across fft_axes if desired
    if fft:
        data = np.fft.fftn(data, axes=fft_axes)

    # Perform fftshift if desired.  If the user does not specify
    # fftshift, if fft is performed, then fftshift will also be
    # performed.  To override this behavior, simply supply
    # fftshift=False in the arguments.  Similarly, to force fftshift
    # even if no fft was performed, supply fftshift=True.
    if fft and (fftshift is None):
        fftshift = True
    elif fftshift is None:
        fftshift = False

    if fftshift:
        data = np.fft.fftshift(data, axes=fft_axes)

    # Take the absolute value to view if necessary; abs must come before
    # the log.  Keep the original (possibly complex) data around so the
    # phase can still be computed afterwards.
    cdata = data
    if np.iscomplexobj(data) or (mag is True) or (log is True):
        data = np.abs(data)

        if log:
            # Don't take log of 0!
            data[data == 0] = np.nan
            data = np.log(data)

    # If we asked for phase, let's work out how we'll do that
    if phase and ((mag is None) or (mag is True)):
        # TODO: figure out which axis to concatenate the phase onto
        data = np.concatenate((data, np.angle(cdata)), axis=fft_axes[-1])
    elif phase and (mag is False):
        data = np.angle(cdata)

    # Run any processing before imshow
    if callable(prep):
        data = prep(data)

    # If it's just a line plot, skip all the montage, movie stuff
    if is_line:
        montage_axis = None
        movie_axis = None

    if montage_axis is not None:
        # We can deal with 4 dimensions if we allow multichannel
        if data.ndim == 4 and 'multichannel' not in montage_opts:
            montage_opts['multichannel'] = True

            # When we move the movie_axis to the end, we will need to
            # adjust the montage axis in case we displace it.  We
            # need to move it to the end so skimontage will consider
            # it the multichannel
            data = np.moveaxis(data, movie_axis, -1)
            if movie_axis < montage_axis:
                montage_axis -= 1

        # Put the montage axis in front
        data = np.moveaxis(data, montage_axis, 0)
        try:
            data = skimontage(data, **montage_opts)
        except ValueError:
            # Multichannel might be erroneously set
            montage_opts['multichannel'] = False
            data = skimontage(data, **montage_opts)

        if data.ndim == 3:
            # If we had 4 dimensions, we just lost one, so now we
            # need to know where the movie dimension went off to...
            if movie_axis > montage_axis:
                movie_axis -= 1
            # Move the movie axis back, it's no longer the color
            # channel
            data = np.moveaxis(data, -1, movie_axis)

    if movie_axis is not None:
        fig = plt.figure()
        data = np.moveaxis(data, movie_axis, -1)
        im = plt.imshow(data[..., 0], **imshow_opts)

        def updatefig(frame):
            '''Animation function for figure.'''
            im.set_array(data[..., frame])
            return im,  # pylint: disable=R1707

        _ani = animation.FuncAnimation(fig,
                                       updatefig,
                                       frames=data.shape[-1],
                                       interval=movie_interval,
                                       blit=True,
                                       repeat=movie_repeat)

        if not test_run:
            plt.show()
    else:
        if data.ndim == 1 or is_line:
            plt.plot(data)
        elif data.ndim == 2:
            # Just a regular old 2d image...
            plt.imshow(np.nan_to_num(data), **imshow_opts)
        else:
            raise ValueError('%d is too many dimensions!' % data.ndim)

        if not test_run:
            plt.show()

    # Save what we looked at if desired
    if save_npy:
        # `ext` only exists when a filename was passed in; fall back otherwise
        if isinstance(image, str) and ext:
            filename = image
        else:
            filename = 'view-output'
        np.save(filename, data)

    # If we're testing, return all the local vars
    if test_run:
        return locals()
    return data
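A few representative calls, sketched under stated assumptions: the filenames are placeholders, and only the keyword arguments come from the signature above.

import numpy as np
from mr_utils import view

# Placeholder filenames used only to illustrate the options
view('kspace.npy', fft=True, log=True)                    # log-magnitude of the FFT
view('coils.mat', coil_combine_axis=-1, fft_axes=(0, 1),
     coil_combine_method='walsh')                         # Walsh coil combination
view(np.random.randn(8, 64, 64), montage_axis=0)          # montage from a numpy array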
Example #13
    def forward_dct(self, x):
        '''Sparsifying transform, discrete cosine transform.'''
        return dct(x, norm='ortho')

    def inverse_dct(self, x):
        '''Inverse sparsifying transform, DCT.'''
        return idct(x, norm='ortho')


if __name__ == '__main__':

    # Load in a data set
    filename = ('/home/nicholas/Documents/research/reordering_data/'
                'STCR_72_rays/Trio/P010710/meas_MID42_CV_Radial7Off_'
                'triple_2.9ml_FID242_GROG.mat')

    data = load_mat(filename, 'Image')
    view(data)
    sx, sy, st, _ = data.shape[:]
    tcurves = data[..., 0].reshape((sx * sy, st))
    view(tcurves[(sx * 133 + 130):(sx * 133 + 150), :])
    print(tcurves.shape)

    # Pick one pixel we know is going to have a nice time curve
    pt = (133, 130)
    px = data[pt[0], pt[1], :, 0]
    # view(px)

    # # Get orderings for real and imaginary parts
    # sr = Sparsify(px.real)
    # si = Sparsify(px.imag)
    # k = 1
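With norm='ortho' the DCT pair above is exactly invertible, which is what makes it usable as a sparsifying transform. A small round-trip check, assuming dct/idct come from scipy.fftpack:

import numpy as np
from scipy.fftpack import dct, idct

x = np.random.randn(128)
coeffs = dct(x, norm='ortho')                      # forward sparsifying transform
assert np.allclose(idct(coeffs, norm='ortho'), x)  # inverse recovers the signal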
Example #14
 def w24():
     path = str(
         Path('mr_utils/test_data/tests/recon/ssfp/w24.mat').resolve())
     return (load_mat(path, key='w2'))
Example #15
'''Show how we perform with an actual blood time curve.'''

import numpy as np
import matplotlib.pyplot as plt

from mr_utils.load_data import load_mat
from mr_utils.utils import Sparsify
from mr_utils import view
from mr_utils.cs import relaxed_ordinator

if __name__ == '__main__':

    filename = 'meas_MID42_CV_Radial7Off_triple_2.9ml_FID242_GROG.mat'
    data = load_mat(filename, key='Image')
    # view(data)

    pt = (133, 130)
    px = data[pt[0], pt[1], :, 0]
    pxr = px.real/np.max(np.abs(px.real))
    pxi = px.imag/np.max(np.abs(px.imag))
    Sr = Sparsify(pxr)
    Si = Sparsify(pxi)

    # plt.plot(px.real)
    # plt.plot(px.imag)
    # plt.title('Real/Imag time curve')
    # plt.show()

    # # Try Finite Differences
    # plt.plot(Sr.forward_fd(px.real))
    # plt.plot(Si.forward_fd(px.imag))
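The commented-out forward_fd/inverse_fd calls suggest a finite-difference sparsifying transform. A minimal sketch of what such a pair typically looks like; this is an illustration of the idea, not the library's actual Sparsify implementation:

import numpy as np

def forward_fd(x):
    '''Finite differences as a sparsifying transform (length N-1).'''
    return np.diff(x)

def inverse_fd(d, x0):
    '''Rebuild the signal from its differences given the first sample.'''
    return np.concatenate(([x0], x0 + np.cumsum(d)))

x = np.random.randn(50)
assert np.allclose(inverse_fd(forward_fd(x), x[0]), x)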
Example #16
 def I_max_mag():
     path = str(
         Path(
             'mr_utils/test_data/tests/recon/ssfp/I_max_mag.mat').resolve())
     return (load_mat(path, key='maximum'))
Example #17
 def CS():
     path = str(
         Path('mr_utils/test_data/tests/recon/ssfp/CS.mat').resolve())
     return (load_mat(path, key='CS'))
Example #18
 def recon_at_iter_50():
     path = str(
         Path(
             'mr_utils/test_data/tests/recon/reordering/recon_at_iter_50.mat'
         ).resolve())
     return (load_mat(path, 'img_est'))
Example #19
 def Id():
     path = str(
         Path('mr_utils/test_data/tests/recon/ssfp/Id.mat').resolve())
     return (load_mat(path, key='M'))
Example #20
 def TV_term_update():
     path = str(
         Path('mr_utils/test_data/tests/recon/reordering/TV_term_update.mat'
              ).resolve())
     return (load_mat(path, 'TV_term_update'))
Example #21
 def fidelity_update():
     path = str(
         Path(
             'mr_utils/test_data/tests/recon/reordering/fidelity_update.mat'
         ).resolve())
     return (load_mat(path, 'fidelity_update'))
Example #22
'''Get indices for STR recon.'''

import numpy as np
from tqdm import tqdm

from mr_utils.load_data import load_mat
from mr_utils.cs import relaxed_ordinator
from mr_utils.utils import Sparsify

if __name__ == '__main__':

    # Load data
    x = load_mat('/home/nicholas/Downloads/Temporal_reordering/prior.mat',
                 key='prior')
    print(x.shape)

    rpi = np.zeros(x.shape, dtype=int)
    ipi = np.zeros(x.shape, dtype=int)
    for idx in tqdm(np.ndindex(x.shape[:2]), leave=False):
        ii, jj = idx[0], idx[1]

        xr = x[ii, jj, :].real/np.max(np.abs(x[ii, jj, :].real))
        xi = x[ii, jj, :].imag/np.max(np.abs(x[ii, jj, :].imag))
        Sr = Sparsify(xr)
        Si = Sparsify(xi)

        rpi[ii, jj, :] = relaxed_ordinator(
            xr, lam=.08, k=10, unsparsify=Sr.inverse_fd,
            transform_shape=(xr.size-1,))
        ipi[ii, jj, :] = relaxed_ordinator(
            xi, lam=0.1, k=13, unsparsify=Si.inverse_fd,
            transform_shape=(xi.size-1,))
Example #23
    i_coil_imspace = coil_imspace.imag / np.max(np.abs(coil_imspace.imag))
    r_recon_flipped = recon_flipped.real / np.max(np.abs(recon_flipped.real))
    i_recon_flipped = recon_flipped.imag / np.max(np.abs(recon_flipped.imag))

    # Comparisons
    MSE = 1 / 2 * (compare_mse(r_coil_imspace, r_recon_flipped) +
                   compare_mse(i_coil_imspace, i_recon_flipped))
    SSIM = compare_ssim(abs_coil_imspace, abs_recon_flipped)
    PSNR = compare_psnr(abs_coil_imspace, abs_recon_flipped)
    return (MSE, SSIM, PSNR)


if __name__ == '__main__':

    # Make a dynamic phantom
    mask = load_mat(
        'mr_utils/recon/reordering/temporal_tv/mask_k_space_sparse.mat')
    nt, nx, ny = mask.shape[:]

    circ = np.zeros(mask.shape, dtype='complex')
    radius = np.abs(np.sin(np.linspace(0, np.pi, nt)) * .7 + .1)
    for ii in range(nt):
        circ[ii, ...] = bssfp_2d_cylinder(dims=(nx, ny),
                                          radius=radius[ii],
                                          kspace=True)
    circ = np.fft.fftshift(circ, axes=(1, 2)).transpose((2, 1, 0))
    mask = mask.transpose(2, 1, 0)
    # view(mask*circ,fft=True)

    # Run Ganesh's temporal recon
    weight_fidelity = 1.0
    weight_temporal = 0.01
Example #24
 def tv_prior():
     path = str(
         Path('mr_utils/test_data/tests/recon/reordering/tv_prior.mat').
         resolve())
     return (load_mat(path, 'tv_prior'))
Example #25
 def mask():
     path = str(
         Path('mr_utils/test_data/tests/recon/reordering/mask.mat').resolve(
         ))
     return (load_mat(path, 'mask'))
Example #26
 def Coil1_data():
     path = str(
         Path('mr_utils/test_data/tests/recon/reordering/Coil1_data.mat').
         resolve())
     return (load_mat(path, 'Coil1'))