Example #1
def cs_total_variation(args, kspace, acquisition, acceleration, num_low_freqs):
    """
    Run ESPIRiT coil sensitivity estimation and Total Variation Minimization based
    reconstruction algorithm using the BART toolkit.
    """

    if acquisition not in REG_PARAM[args.challenge]:
        raise ValueError(f'Invalid acquisition protocol: {acquisition}')
    if acceleration not in {4, 8}:
        raise ValueError(f'Invalid acceleration factor: {acceleration}')

    if args.challenge == 'singlecoil':
        kspace = kspace.unsqueeze(0)
    kspace = kspace.permute(1, 2, 0, 3).unsqueeze(0)
    kspace = tensor_to_complex_np(kspace)

    # Estimate sensitivity maps
    sens_maps = bart.bart(1, f'ecalib -d0 -m1 -r {num_low_freqs}', kspace)

    # Use Total Variation Minimization to reconstruct the image
    reg_wt = REG_PARAM[args.challenge][acquisition][acceleration]
    pred = bart.bart(1, f'pics -d0 -S -R T:7:0:{reg_wt} -i {args.num_iters}',
                     kspace, sens_maps)
    pred = torch.from_numpy(np.abs(pred[0]))

    # Crop the predicted image to selected resolution if bigger
    smallest_width = min(args.resolution, pred.shape[-1])
    smallest_height = min(args.resolution, pred.shape[-2])
    return transforms.center_crop(pred, (smallest_height, smallest_width))
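A minimal usage sketch (not part of the original example): it assumes REG_PARAM maps challenge -> acquisition -> acceleration -> regularization weight, and that kspace is a real-valued torch tensor with a trailing real/imag dimension, as in fastMRI.

import argparse
import torch

REG_PARAM = {'multicoil': {'CORPD_FBK': {4: 0.01, 8: 0.02}}}  # hypothetical weights
args = argparse.Namespace(challenge='multicoil', num_iters=200, resolution=320)
kspace = torch.randn(15, 640, 372, 2)  # (coils, kx, ky, re/im), made-up sizes
# pred = cs_total_variation(args, kspace, 'CORPD_FBK', 4, num_low_freqs=26)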
Example #2
def cs_total_variation(args, kspace, reg_wt, crop_size, num_low_freqs):
    """
    Run ESPIRiT coil sensitivity estimation and Total Variation Minimization
    based reconstruction algorithm using the BART toolkit.

    Args:
        args (argparse.Namespace): Arguments including ESPIRiT parameters.
        kspace (torch.Tensor): Undersampled k-space data.
        reg_wt (float): Regularization parameter.
        crop_size (tuple): Size to crop the final image to.
        num_low_freqs (int or None): Number of fully sampled low-frequency
            lines used for ESPIRiT calibration; BART's default calibration
            region is used when None.

    Returns:
        np.array: Reconstructed image.
    """
    if args.challenge == "singlecoil":
        kspace = kspace.unsqueeze(0)

    kspace = kspace.permute(1, 2, 0, 3).unsqueeze(0)
    kspace = tensor_to_complex_np(kspace)

    # estimate sensitivity maps
    if num_low_freqs is None:
        sens_maps = bart.bart(1, "ecalib -d0 -m1", kspace)
    else:
        sens_maps = bart.bart(1, f"ecalib -d0 -m1 -r {num_low_freqs}", kspace)

    # use Total Variation Minimization to reconstruct the image
    pred = bart.bart(1, f"pics -d0 -S -R T:7:0:{reg_wt} -i {args.num_iters}",
                     kspace, sens_maps)
    pred = torch.from_numpy(np.abs(pred[0]))

    # check for FLAIR 203
    if pred.shape[1] < crop_size[1]:
        crop_size = (pred.shape[1], pred.shape[1])

    return T.center_crop(pred, crop_size)
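Both TV examples rely on tensor_to_complex_np, which is not shown. A minimal equivalent, assuming the fastMRI convention of a trailing (real, imag) dimension:

import numpy as np

def tensor_to_complex_np_sketch(t):
    # collapse the trailing (real, imag) dimension into a complex array
    d = t.numpy()
    return d[..., 0] + 1j * d[..., 1]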
Example #3
def get_coil_combine_funs(N, v='-v0'):
    '''Return list of functions that perform coil combination.'''

    fft = lambda x0, ax=(1, 2): np.fft.fftshift(
        np.fft.fft2(np.fft.fftshift(x0, axes=ax), axes=ax), axes=ax)
    ifft = lambda x0, ax=(1, 2): np.fft.fftshift(
        np.fft.ifft2(np.fft.fftshift(x0, axes=ax), axes=ax), axes=ax)

    return (
        [
            # Walsh
            lambda x0: np.sum(walsh(x0)[0].conj() * x0, axis=0),

            # Inati
            lambda x0: inati(x0)[1],

            # PCA (imspace)
            lambda x0: coil_pca(x0, coil_dim=0, n_components=1),

            # PCA (kspace)
            lambda x0: ifft(coil_pca(
                fft(x0, ax=(1, 2)), coil_dim=0, n_components=1),
                            ax=(1, 2)),

            # Direct method
            lambda x0: np.sum(np.moveaxis(
                bart(1, 'caldir %d' % int(N / 2),
                     fft(np.moveaxis(x0, 0, -1)[:, :, None, :], ax=(0, 1))).
                squeeze(), -1, 0).conj() * x0,
                              axis=0),

            # Geometric
            lambda x0: ifft(bart(
                1, 'cc -p 1 -A -G',
                fft(np.moveaxis(x0, 0, -1)[:, :, None, :], ax=(0, 1))),
                            ax=(0, 1)).squeeze(),

            # # ESPIRiT -- using cc, don't use this one!
            # lambda x0: ifft(bart(1, 'cc -p 1 -A -E', fft(
            #     x0.T[:, :, None, :], ax=(0, 1))), ax=(0, 1)).squeeze()

            # ESPIRiT -- using ecalib, use this one!
            # use -v for numerical phantom
            lambda x0: np.sum(np.moveaxis(
                bart(1, 'ecalib -a -m1 -P -S %s' % v,
                     fft(np.moveaxis(x0, 0, -1)[:, :, None, :], ax=(0, 1))).
                squeeze(), -1, 0).conj() * x0,
                              axis=0),

            # SVD -- surprisingly good!
            lambda x0: ifft(bart(
                1, 'cc -p 1 -A -S',
                fft(np.moveaxis(x0, 0, -1)[:, :, None, :], ax=(0, 1))),
                            ax=(0, 1)).squeeze()
        ],
        [
            'Walsh', 'Inati', 'PCA (image space)', 'PCA (k-space)', 'Direct',
            'Geometric', 'ESPIRiT', 'SVD'
        ])
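A brief usage sketch: the two returned lists are aligned one-to-one, so each combiner can be applied to a complex coil stack of shape (nc, sx, sy) and labeled by name (coil_images here is a hypothetical input).

funs, names = get_coil_combine_funs(N=64)
for fun, name in zip(funs, names):
    print(name)
    # combined = fun(coil_images)  # coil_images: complex (nc, sx, sy) array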
Example #4
def calibrate_mtx_bart(data, cc_mode):
    # BART data format: [nx,ny,nz,nc]
    data = np.expand_dims(np.moveaxis(data, -1, 0), 2)
    if cc_mode == 'scc_bart':
        with suppress_stdout_stderr():
            mtx = bart.bart(1, 'cc -S -A -M', data)
    elif cc_mode == 'gcc_bart':
        with suppress_stdout_stderr():
            mtx = bart.bart(1, 'cc -G -A -M', data)
    else:
        raise ValueError('unknown cc_mode "%s"' % cc_mode)
    return mtx
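The matrix returned by cc ... -M is meant to be applied afterwards with BART's ccapply, as reduce_data below does; a hedged sketch, with ncc being the number of virtual coils to keep:

# data must already be in BART layout [nx, ny, nz, nc]
# compressed = bart.bart(1, 'ccapply -S -p %d' % ncc, data, mtx)  # scc_bart
# compressed = bart.bart(1, 'ccapply -G -p %d' % ncc, data, mtx)  # gcc_bart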
def process_slice(kspace, args):
    nkx, nky, nphases, ncoils = kspace.shape

    if 0 < args.crop_size < nkx:
        # crop along readout dimension
        images = fftc.ifftc(kspace, axis=0)
        images = center_crop(images, [args.crop_size, nky, nphases, ncoils])
        kspace = fftc.fftc(images, axis=0).astype(np.complex64)
        nkx = args.crop_size

    # simulate reduced FOV
    #kspace = reduce_fov(kspace, ...)

    # compute time-average for ESPIRiT calibration
    kspace_avg = time_average(kspace, axis=-2)

    # ESPIRiT - compute sensitivity maps
    cmd = f'ecalib -d 0 -S -m {args.nmaps} -c {args.crop_value} -r {args.calib_size}'
    maps = bart.bart(1, cmd, kspace_avg[:, :, None, :])
    maps = np.reshape(maps, (nkx, nky, 1, ncoils, args.nmaps))

    # Convert everything to tensors
    kspace_tensor = cplx.to_tensor(kspace).unsqueeze(0)
    maps_tensor = cplx.to_tensor(maps).unsqueeze(0)

    # Do coil combination using sensitivity maps (PyTorch)
    A = T.SenseModel(maps_tensor)
    im_tensor = A(kspace_tensor, adjoint=True)

    # Convert tensor back to numpy array
    target = cplx.to_numpy(im_tensor.squeeze(0))

    return kspace, maps, target
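process_slice calls a time_average helper that is not shown; a plausible sketch (an assumption, not the original implementation) averages the acquired, non-zero samples of each k-space location across the phase axis:

import numpy as np

def time_average(kspace, axis):
    # average only acquired (non-zero) samples at each k-space location
    sampled = (kspace != 0)
    counts = np.maximum(sampled.sum(axis=axis), 1)
    return kspace.sum(axis=axis) / counts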
def cs_total_variation(kspace, reg_wt, num_iters):
    """
    Run Total Variation Minimization based
    reconstruction algorithm using the BART toolkit.
    """
    kspace = np.expand_dims(np.transpose(kspace, (1, 2, 0)), 0)

    sens_maps = bart.bart(1, 'ecalib -d0 -m1', kspace)

    kspace = kspace.astype(np.complex64)
    sens_maps = sens_maps.astype(np.complex64)

    pred = bart.bart(1, f'pics -d0 -S -R T:7:0:{reg_wt} -i {num_iters}',
                     kspace, sens_maps)
    pred = np.abs(pred[0])
    return pred
def BARTBufferedDataPythonGadget(connection):
    logging.info("Python reconstruction running - reading readout data")
    start = time.time()

    for acquisition in connection:
        # acquisition is a vector of structures called reconBit
        for reconBit in acquisition:
            print(type(reconBit))
            # reconBit.ref is the calibration data for parallel imaging
            # reconBit.data is the undersampled dataset
            print('-----------------------')
            # each of them includes a specific header and the k-space data
            print(type(reconBit.data.headers))
            print(type(reconBit.data.data))

            print(reconBit.data.headers.shape)
            print(reconBit.data.data.shape)

            # use one readout header (index 34) as a reference for the output images
            repetition = reconBit.data.headers.flat[34].idx.repetition
            print(repetition)
            reference_header = reconBit.data.headers.flat[34]

            np.save('/tmp/gadgetron/data', reconBit.data.data)

            try:
                if reconBit.ref.data is not None:
                    print("reference data exists")
                    np.save('/tmp/gadgetron/reference', reconBit.ref.data)
                else:
                    print("reference data does not exist")
            except Exception:
                print("issue with reference data")

            try:
                print("calling BART")
                # 2D inverse FFT in BART ('7' is a bitmask selecting dims 0-2)
                im = bart(1, 'fft -iu 7', reconBit.data.data)
            except Exception:
                print("issue with BART")

            # plt.subplot(121)
            # plt.imshow(np.abs(np.squeeze(reconBit.data.data[:, :, 0, 0, 0, 0, 0])))
            # plt.subplot(122)
            # plt.imshow(np.abs(np.squeeze(im[:, :, 0, 0, 0, 0, 0])))
            # plt.show()
            send_reconstructed_images(connection, im, reference_header)

        # connection.send(acquisition)

    logging.info(f"Python reconstruction done. Duration: {(time.time() - start):.2f} s")
Example #8
def cs_total_variation(args, kspace):
    """
    Run ESPIRiT coil sensitivity estimation and Total Variation Minimization based
    reconstruction algorithm using the BART toolkit.
    """

    if args.challenge == 'singlecoil':
        kspace = kspace.unsqueeze(0)
    kspace = kspace.permute(1, 2, 0, 3).unsqueeze(0)
    kspace = tensor_to_complex_np(kspace)

    # Estimate sensitivity maps
    sens_maps = bart.bart(1, 'ecalib -d0 -m1', kspace)

    # Use Total Variation Minimization to reconstruct the image
    pred = bart.bart(
        1, f'pics -d0 -S -R T:7:0:{args.reg_wt} -i {args.num_iters}', kspace,
        sens_maps)
    pred = torch.from_numpy(np.abs(pred[0]))

    # Crop the predicted image to the correct size
    return transforms.center_crop(pred, (args.resolution, args.resolution))
Example #9
'''Demo of Non-Cartesian GRAPPA.'''

import numpy as np
import matplotlib.pyplot as plt

from bart import bart  # pylint: disable=E0401

from pygrappa import ncgrappa

if __name__ == '__main__':

    # Get phantom from BART since phantominator doesn't have
    # arbitrary sampling yet...
    sx, spokes, nc = 128, 128, 8
    traj = bart(1, 'traj -r -x %d -y %d' % (sx, spokes))
    kspace = bart(1, 'phantom -k -s %d -t' % nc, traj)

    # # Do inverse gridding with NUFFT so we can get fully sampled
    # # cartesian ACS
    # igrid = bart(
    #     1, 'nufft -i -t -d %d:%d:1' % (sx, sx),
    #     traj, kspace).squeeze()
    # # plt.imshow(np.abs(igrid[..., 0]))
    # # plt.show()
    # ax = (0, 1)
    # igrid = np.fft.ifftshift(np.fft.fft2(np.fft.fftshift(
    #     igrid, axes=ax), axes=ax), axes=ax)
    #
    # # 20x20 calibration region at the center
    # ctr = int(sx/2)
    # pad = 10
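A hedged sketch of how the truncated demo might continue, reusing the commented-out calibration region above (the exact ncgrappa signature should be checked against pygrappa's docs):

    # kx = traj[0, ...].real.flatten()
    # ky = traj[1, ...].real.flatten()
    # k = kspace.reshape((-1, nc))
    # calib = igrid[ctr - pad:ctr + pad, ctr - pad:ctr + pad, :]
    # res = ncgrappa(kx, ky, k, calib, ...)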
Example #10
    sx, spokes, nc = 128, 128, 8
    os = 2  # oversampling factor for gridding
    method = 'linear'  # interpolation strategy for gridding

    # Helper functions for sum-of-squares coil combine and ifft2
    sos = lambda x0: np.sqrt(np.sum(np.abs(x0)**2, axis=-1))
    ifft = lambda x0: np.fft.fftshift(np.fft.ifft2(
        np.fft.ifftshift(np.nan_to_num(x0), axes=(0, 1)), axes=(0, 1)),
                                      axes=(0, 1))

    # If you have BART installed, you could replicate this demo with
    # the following:
    if FOUND_BART:
        # Make a radial trajectory, we'll have to mess with it later
        # to get it to look like pygrappa usually assumes it is
        traj = bart(1, 'traj -r -x %d -y %d' % (sx, spokes))

        # Make a wrapper function for BART's nufft function,
        # assumes 2D
        bart_nufft = lambda x0: bart(1, 'nufft -i -t -d %d:%d:1' % (
            sx, sx), traj, x0.reshape((1, sx, spokes, nc))).squeeze()

        # Multicoil Shepp-Logan phantom kspace measurements
        kspace = bart(1, 'phantom -k -s %d -t' % nc, traj)

        # Make kx, ky, k look like they do for pygrappa
        bart_kx = traj[0, ...].real.flatten()
        bart_ky = traj[1, ...].real.flatten()
        bart_k = kspace.reshape((-1, nc))

        # Do the thing
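A hedged continuation of "Do the thing", reusing the helpers defined above:

        # imspace = bart_nufft(bart_k)           # inverse gridding with BART's NUFFT
        # plt.imshow(sos(imspace)); plt.show()   # sum-of-squares coil combine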
Example #11
def _run_reco(args):
    np.seterr(divide='ignore', invalid='ignore')
    # Create par struct to store parameters
    par = {}
    ###############################################################################
    # Read Input data   ###########################################################
    ###############################################################################
    if args.data == '':
        raise ValueError("No data file specified")

    name = os.path.normpath(args.data)
    fname = name.split(os.sep)[-1]
    h5_dataset = h5py.File(name, 'r')
    par["file"] = h5_dataset
    h5_dataset_rawdata_name = 'rawdata'
    h5_dataset_trajectory_name = 'trajectory'

    if "heart" in args.data:
        if args.acc == 2:
            R = 33
            trajectory = h5_dataset.get(h5_dataset_trajectory_name)[:, :, :33]
            rawdata = h5_dataset.get(h5_dataset_rawdata_name)[:, :, :33, :]
        elif args.acc == 3:
            R = 22
            trajectory = h5_dataset.get(h5_dataset_trajectory_name)[:, :, :22]
            rawdata = h5_dataset.get(h5_dataset_rawdata_name)[:, :, :22, :]
        elif args.acc == 4:
            R = 11
            trajectory = h5_dataset.get(h5_dataset_trajectory_name)[:, :, :11]
            rawdata = h5_dataset.get(h5_dataset_rawdata_name)[:, :, :11, :]
        else:
            R = 55
            trajectory = h5_dataset.get(h5_dataset_trajectory_name)[...]
            rawdata = h5_dataset.get(h5_dataset_rawdata_name)[...]
    else:
        R = args.acc
        trajectory = h5_dataset.get(h5_dataset_trajectory_name)[:, :, ::R]
        rawdata = h5_dataset.get(h5_dataset_rawdata_name)[:, :, ::R, :]

    [dummy, nFE, nSpokes, nCh] = rawdata.shape

    ###############################################################################
    # Read Data ###################################################################
    ###############################################################################
    par["ogf"] = float(eval(args.ogf))
    dimX, dimY, NSlice = [int(nFE / par["ogf"]), int(nFE / par["ogf"]), 1]
    data = np.require(np.squeeze(rawdata.T)[None, :, None, ...],
                      requirements='C')
    par["traj"] = np.require(
        (trajectory[0] / (2 * np.max(trajectory[0])) + 1j * trajectory[1] /
         (2 * np.max(trajectory[0]))).T[None, ...],
        requirements='C')

    par["dcf"] = np.sqrt(np.array(goldcomp.cmp(par["traj"]),
                                  dtype=DTYPE_real)).astype(DTYPE_real)
    par["dcf"] = np.require(np.abs(par["dcf"]), DTYPE_real, requirements='C')
    [NScan, NC, reco_Slices, Nproj, N] = data.shape
    ###############################################################################
    # Set sequence related parameters #############################################
    ###############################################################################
    par["NC"] = NC
    par["dimY"] = dimY
    par["dimX"] = dimX
    par["NSlice"] = NSlice
    par["NScan"] = NScan
    par["N"] = N
    par["Nproj"] = Nproj
    ###############################################################################
    # Create OpenCL Context and Queues ############################################
    ###############################################################################
    platforms = cl.get_platforms()
    par["GPU"] = False
    par["Platform_Indx"] = 0
    for j in range(len(platforms)):
        if platforms[j].get_devices(device_type=cl.device_type.GPU):
            print(
                "GPU OpenCL platform <%s> found with %i device(s) "
                "and OpenCL-version <%s>" %
                (str(platforms[j].get_info(cl.platform_info.NAME)),
                 len(platforms[j].get_devices(device_type=cl.device_type.GPU)),
                 str(platforms[j].get_info(cl.platform_info.VERSION))))
            par["GPU"] = True
            par["Platform_Indx"] = j
    if not par["GPU"]:
        print("No GPU OpenCL platform found. Returning.")

    par["ctx"] = []
    par["queue"] = []
    num_dev = len(platforms[par["Platform_Indx"]].get_devices())
    par["num_dev"] = num_dev
    for device in range(num_dev):
        dev = []
        dev.append(platforms[par["Platform_Indx"]].get_devices()[device])
        tmp = cl.Context(dev)
        par["ctx"].append(tmp)
        par["queue"].append(
            cl.CommandQueue(
                tmp,
                platforms[par["Platform_Indx"]].get_devices()[device],
                properties=cl.command_queue_properties.
                OUT_OF_ORDER_EXEC_MODE_ENABLE
                | cl.command_queue_properties.PROFILING_ENABLE))
###############################################################################
# Coil Sensitivity Estimation #################################################
###############################################################################
    img_igrid = bart(1, 'nufft -i -t', trajectory, rawdata)
    img_igrid_sos = bart(1, 'rss 8', img_igrid)
    img_igrid_sos = np.abs(img_igrid_sos).astype(DTYPE)

    try:
        slices_coils = par["file"]['Coils'][()].shape[1]
        print("Using precomputed coil sensitivities")
        par["C"] = par["file"]['Coils'][
            :, int(slices_coils/2) - int(np.floor((par["NSlice"])/2)):
            int(slices_coils/2) + int(np.ceil(par["NSlice"]/2)), ...]\
            .astype(DTYPE)

        par["InScale"] = par["file"]["InScale"][
         int(slices_coils/2)-int(np.floor((par["NSlice"])/2)):
         int(slices_coils/2)+int(np.ceil(par["NSlice"]/2)), ...]\
            .astype(DTYPE_real)
    except KeyError:
        data_bart = np.fft.fftshift(
            np.fft.fft2(np.fft.ifftshift(img_igrid.T, (-2, -1)), norm='ortho'),
            (-2, -1))
        data_bart = np.require(np.squeeze(data_bart.T).astype(DTYPE),
                               requirements='C')[None, ...]
        sens_maps = bart(1, 'ecalib -m1 -I', data_bart)
        sens_maps = np.require(np.squeeze(sens_maps).T, requirements='C')
        par["C"] = sens_maps[:, None, ...]
        par["C"] = np.require(np.transpose(par["C"], (0, 1, 3, 2)),
                              requirements='C')
        sumSqrC = np.sqrt(np.sum(np.abs(par["C"] * np.conj(par["C"])), 0))
        par["C"] = par["C"] / np.tile(sumSqrC, (par["NC"], 1, 1, 1))
        par["C"][~np.isfinite(par["C"])] = 0
        #        #### Remove backfolded part at the top
        #        par["C"][:, :, :20, :] = 0
        par["InScale"] = sumSqrC
        par["file"].create_dataset('Coils',
                                   shape=par["C"].shape,
                                   dtype=DTYPE,
                                   data=par["C"])
        par["file"].create_dataset('InScale',
                                   shape=sumSqrC.shape,
                                   dtype=DTYPE_real,
                                   data=sumSqrC)
        del sumSqrC
    par["file"].close()
    ###############################################################################
    # Set Intensity and Density Scaling ###########################################
    ###############################################################################
    if args.inscale:
        pass
    else:
        par["C"] *= par["InScale"]
        par["InScale"] = np.ones_like(par["InScale"])
    if args.denscor:
        data = data * (par["dcf"])
    else:
        par["dcf"] = np.ones_like(par["dcf"])


###############################################################################
# generate nFFT  ##############################################################
###############################################################################
    FFT = utils.NUFFT(par)

    def nFTH(x, fft, par):
        siz = np.shape(x)
        result = np.require(np.zeros(
            (par["NC"], par["NSlice"], par["NScan"], par["dimY"], par["dimX"]),
            dtype=DTYPE),
                            requirements='C')
        tmp_result = clarray.empty(
            fft.queue, (par["NScan"], 1, 1, par["dimY"], par["dimX"]),
            dtype=DTYPE)
        for j in range(siz[1]):
            for k in range(siz[2]):
                inp = clarray.to_device(
                    fft.queue,
                    np.require(x[:, j, k, ...][:, None, None, ...],
                               requirements='C'))
                fft.adj_NUFFT(tmp_result, inp)
                result[j, k, ...] = np.squeeze(tmp_result.get())
        return np.transpose(result, (2, 0, 1, 3, 4))

    images_coils = nFTH(data, FFT, par)
    images = np.require(np.sum(images_coils * (np.conj(par["C"])), axis=1),
                        requirements='C')
    del FFT, nFTH

    opt = CGReco(par)
    opt.data = data
    ###############################################################################
    # Start Reco ##################################################################
    ###############################################################################
    opt.reco_par = utils.read_config(args.config, "DEFAULT")
    opt.execute()
    result = opt.result
    res = opt.res
    del opt
    ###############################################################################
    # New .hdf5 save files ########################################################
    ###############################################################################
    outdir = ""
    if "heart" in args.data:
        outdir += "/heart"
    elif "brain" in args.data:
        outdir += "/brain"
    if not os.path.exists('./output'):
        os.makedirs('output')
    if not os.path.exists('./output' + outdir):
        os.makedirs("./output" + outdir)
    cwd = os.getcwd()
    os.chdir("./output" + outdir)
    f = h5py.File(
        "CG_reco_InScale_" + str(args.inscale) + "_denscor_" +
        str(args.denscor) + "_reduction_" + str(R) + "_acc_" + str(args.acc) +
        "_" + fname, "w")
    f.create_dataset("images_ifft_", images.shape, dtype=DTYPE, data=images)
    f.create_dataset("images_ifft_coils_",
                     images_coils.shape,
                     dtype=DTYPE,
                     data=images_coils)
    f.create_dataset("CG_reco", result.shape, dtype=DTYPE, data=result)
    f.create_dataset('InScale',
                     shape=par["InScale"].shape,
                     dtype=DTYPE_real,
                     data=par["InScale"])
    f.create_dataset('Bart_ref',
                     shape=img_igrid_sos.shape,
                     dtype=DTYPE,
                     data=img_igrid_sos)
    f.attrs['res'] = res
    f.flush()
    f.close()
    os.chdir(cwd)
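A hypothetical CLI wiring for _run_reco; the argument names and types are inferred from the attributes used above (args.data, args.acc, args.ogf, args.inscale, args.denscor, args.config), so the defaults here are illustrative only.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--data', default='', help='path to the raw-data HDF5 file')
parser.add_argument('--acc', type=int, default=1, help='acceleration factor')
parser.add_argument('--ogf', default='4/3', help='over-gridding factor as a string expression')
parser.add_argument('--inscale', type=int, default=1)
parser.add_argument('--denscor', type=int, default=1)
parser.add_argument('--config', default='default.ini', help='config read by utils.read_config')
# _run_reco(parser.parse_args())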
Example #12
df_err_gen['Method'] = ['GenMatch' for i in range(t_gen.shape[0])]
df_err_gen['Relative Error (%)'] = np.abs(
    (t_gen['CATE'].to_numpy() - df_true['TE'].to_numpy()) /
    df_true['TE'].mean())

cate_est_prog = prognostic.prognostic_cv('Y', 'T', df_data, n_splits=5)

df_err_prog = pd.DataFrame()
df_err_prog['Method'] = [
    'Prognostic Score' for i in range(cate_est_prog.shape[0])
]
df_err_prog['Relative Error (%)'] = np.abs(
    (cate_est_prog['avg.CATE'].to_numpy() - df_true['TE'].to_numpy()) /
    df_true['TE'].mean())

cate_est_bart = bart.bart('Y', 'T', df_data, n_splits=5)

df_err_bart = pd.DataFrame()
df_err_bart['Method'] = ['BART' for i in range(cate_est_bart.shape[0])]
df_err_bart['Relative Error (%)'] = np.abs(
    (cate_est_bart['avg.CATE'].to_numpy() - df_true['TE'].to_numpy()) /
    df_true['TE'].mean())

cate_est_cf = causalforest.causalforest('Y', 'T', df_data, n_splits=5)

df_err_cf = pd.DataFrame()
df_err_cf['Method'] = ['Causal Forest' for i in range(cate_est_cf.shape[0])]
df_err_cf['Relative Error (%)'] = np.abs(
    (cate_est_cf['avg.CATE'].to_numpy() - df_true['TE'].to_numpy()) /
    df_true['TE'].mean())
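Note that bart in this example is BART in the sense of Bayesian Additive Regression Trees (a CATE estimator), not the MRI toolkit of the other examples. The per-method error frames can then be stacked for a side-by-side comparison, e.g.:

import pandas as pd

df_err = pd.concat([df_err_gen, df_err_prog, df_err_bart, df_err_cf])
# df_err.boxplot(column='Relative Error (%)', by='Method')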
Example #13
                               axes=(-2, -1))

linear_recon = linear_recon / np.max(np.abs(linear_recon))
linear_recon = np.sqrt(np.sum(T.center_crop(linear_recon, (320, 320)) ** 2, 0))

masked_kspace = masked_kspace.permute(1, 2, 0, 3).unsqueeze(0)
masked_kspace = tensor_to_complex_np(masked_kspace)

sens_maps = bart.bart(1, "ecalib -d0 -m1", masked_kspace)

reg_wt = 0.01
num_iters = 200
pred = np.abs(bart.bart(1, f"pics -d0 -S -R T:7:0:{reg_wt} -i {num_iters}", masked_kspace, sens_maps)[0])
pred = pred / np.max(np.abs(pred))

# check for FLAIR 203
Example #14
def reduce_data(data, mdh, remove_os=False, cc_mode=False, mtx=None, ncc=None):

    if cc_mode == 'scc_bart' or cc_mode == 'gcc_bart':
        import bart

    if data.dtype == np.dtype("S1"):
        # nothing to do in case of bytearray
        return data, False, False

    x_in_timedomain = True

    rm_os_active = remove_os
    if mdh_def.is_flag_set(mdh, 'NOISEADJSCAN'):
        rm_os_active = False

    cc_active = False
    if cc_mode and mtx is not None and data.shape[0] == mtx.shape[-1]:
        cc_active = True

    if rm_os_active:
        nx = data.shape[-1]
        data, x_in_timedomain = to_freqdomain(data, x_in_timedomain)
        data = np.delete(data, slice(nx // 4, nx * 3 // 4), -1)

    reflect_data = False
    if (cc_active and (cc_mode == 'gcc' or cc_mode == 'gcc_bart')):
        reflect_data = bool(mdh['aulEvalInfoMask'][0] & (1 << 24))
        if reflect_data:
            data = data[:, ::-1]

    if cc_active:
        if cc_mode == 'scc' or cc_mode == 'gcc':
            _, nx = data.shape
            ncc = mtx.shape[1]
            if cc_mode == 'scc':
                data = mtx[0] @ data
            elif cc_mode == 'gcc':
                if nx != mtx.shape[0]:
                    # nx mismatch; deactivate cc mode
                    cc_active = False
                else:
                    data, x_in_timedomain = to_freqdomain(
                        data, x_in_timedomain)
                    for x in range(nx):
                        data[:ncc, x] = mtx[x] @ data[:, x]
                    data = data[:ncc, :]
        else:
            with suppress_stdout_stderr():
                # BART data format: [nx,ny,nz,nc]
                data = np.expand_dims(
                    np.expand_dims(np.swapaxes(data, 0, 1), 1), 1)
                if cc_mode == 'scc_bart':
                    data = bart.bart(1, 'ccapply -S -p ' + str(ncc), data, mtx)
                elif cc_mode == 'gcc_bart':
                    if data.shape[0] != mtx.shape[0]:
                        # nx mismatch; deactivate cc mode
                        cc_active = False
                    else:
                        data = bart.bart(1, 'ccapply -G -p ' + str(ncc), data,
                                         mtx)
                data = np.swapaxes(np.squeeze(data), 0, 1)

    data, x_in_timedomain = to_timedomain(data, x_in_timedomain)

    if reflect_data:
        data = data[:, ::-1]

    return np.complex64(data), rm_os_active, cc_active
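reduce_data tracks the readout domain with to_freqdomain/to_timedomain, which are not shown. A minimal sketch of the toggle pattern (an assumption; the original's FFT direction and shift conventions may differ):

import numpy as np

def to_freqdomain(data, in_timedomain):
    if in_timedomain:
        data = np.fft.ifft(data, axis=-1)
    return data, False

def to_timedomain(data, in_timedomain):
    if not in_timedomain:
        data = np.fft.fft(data, axis=-1)
    return data, True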
def main(args):
    # Get list of all files
    input_files = glob.glob(os.path.join(args.input_path, '*.kspace'))
    num_files = len(input_files)

    # Akshay's selected test cases
    selected_files = [
        '21895_122887.kspace', '21927_204807.kspace', '21929_312327.kspace',
        '21944_092167.kspace', '21998_757767.kspace', '22038_235527.kspace',
        '22065_317447.kspace', '22068_604167.kspace', '22113_942087.kspace',
        '22242_721927.kspace', '22320_358407.kspace', '22359_153607.kspace',
        '22453_046087.kspace', '22546_194567.kspace', '22563_225287.kspace',
        '22576_296967.kspace', '22597_296967.kspace', '22671_358407.kspace',
        '22705_691207.kspace', '22723_128007.kspace', '22863_071687.kspace',
        '23080_215047.kspace', '23097_706567.kspace', '23110_696327.kspace',
        '23133_230407.kspace', '23172_153607.kspace', '23226_158727.kspace',
        '23294_153607.kspace', '23452_286727.kspace', '23454_051207.kspace',
        '23515_706567.kspace', '23536_071687.kspace', '23545_481287.kspace',
        '23601_614407.kspace', '23632_865287.kspace', '23641_537607.kspace',
        '23679_906247.kspace', '23862_138247.kspace', '23902_153607.kspace',
        '24051_686087.kspace', '24052_727047.kspace', '24079_046087.kspace',
        '24159_645127.kspace', '24326_583687.kspace', '24415_327687.kspace',
        '24478_158727.kspace', '24555_384007.kspace', '24705_501767.kspace',
        '24789_665607.kspace', '24875_353287.kspace', '24892_578567.kspace'
    ]

    # Sort test cases into a list (iterate over a copy: removing items from a
    # list while iterating over it skips elements)
    test_files = []
    for file in list(input_files):
        if os.path.split(file)[-1] in selected_files:
            test_files.append(file)
            input_files.remove(file)

    # Figure out data split (hard-code this to 65/10/25 for now)
    num_test = len(test_files)
    num_validate = int(round(0.1 * num_files))
    num_train = num_files - num_validate - num_test

    # Sort remaining cases into training and validation lists
    train_files = input_files[:num_train]
    validate_files = input_files[num_train:]

    # Print out data split summary
    print('Total datasets: %d' % num_files)
    print('  Training datasets: %d' % num_train)
    print('  Validation datasets: %d' % num_validate)
    print('  Test datasets: %d' % num_test)

    # create data directories if they don't already exist
    for folder in ('', 'train', 'validate', 'test'):
        os.makedirs(os.path.join(args.output_path, folder), exist_ok=True)

    for file in input_files:
        print('Processing %s...' % os.path.split(file)[-1])

        # Load HDF5 file, read k-space and image data
        hf = h5py.File(file, 'r')
        kspace = hf['kspace_real'][()] + 1j * hf['kspace_imag'][()]
        # mask = hf['mask'][()]

        # get data dimensions
        kspace = np.transpose(kspace, axes=[1, 2, 0, 3,
                                            4])  # remove this line later
        xres, yres, num_slices, num_echoes, num_coils = kspace.shape

        # pre-process k-space data (BART)#
        kspace = bart.bart(1, 'fftmod 4', kspace)  # de-modulate across slice
        kspace = bart.bart(1, 'fft -i 1', kspace)  # inverse FFT across readout

        # crop readout (to remove edge slices with overlap)
        if args.crop_readout < 1.0:
            xres_old = xres
            xres = int(round(args.crop_readout * xres_old))
            x_from = (xres_old - xres) // 2
            x_to = x_from + xres
            kspace = kspace[x_from:x_to]

        # declare arrays
        maps = np.zeros((xres, yres, num_slices, 1, num_coils, args.num_emaps),
                        dtype=np.complex64)
        images = np.zeros((xres, yres, num_slices, num_echoes, args.num_emaps),
                          dtype=np.complex64)

        for x in range(xres):
            # Process data readout pt by readout pt
            im_slice, maps_slice = process_slice(kspace[x], args)

            # Save everything into arrays
            maps[x] = maps_slice
            images[x] = im_slice

        # Determine path to new hdf5 file
        if file in train_files:
            folder = 'train'
        elif file in validate_files:
            folder = 'validate'
        else:
            folder = 'test'

        # write out new hdf5 for each echo
        for echo in range(num_echoes):
            file_new = os.path.join(args.output_path, folder,
                                    os.path.split(file)[-1])
            file_new += '.echo%d' % echo
            print(file_new)
            with h5py.File(file_new, 'w') as hf_new:
                # create datasets within HDF5
                hf_new.create_dataset('kspace', data=kspace[:, :, :, echo, :])
                hf_new.create_dataset('maps', data=maps[:, :, :, 0, :, :])
                hf_new.create_dataset('target', data=images[:, :, :, echo, :])

            if args.dbwrite:
                print('Writing out files to home folder!')
                cfl.writecfl('~/kspace', kspace)
                cfl.writecfl('~/maps', maps)
                cfl.writecfl('~/images', images)
Example #16
def expand_data(data, mdh, remove_os=False, cc_mode=False, inv_mtx=None):

    if cc_mode == 'scc_bart' or cc_mode == 'gcc_bart':
        import bart

    if data.dtype == np.dtype("S1"):
        return data  # nothing to do in case of bytearray

    if inv_mtx is None:
        inv_mtx = False

    nc, nx = data.shape

    x_in_timedomain = True

    reflect_data = False
    if cc_mode == 'gcc' or cc_mode == 'gcc_bart':
        # for performance reasons, x dim was stored in freq. domain
        reflect_data = bool(mdh['aulEvalInfoMask'][0] & (1 << 24))
        if reflect_data:
            data = data[:, ::-1]

    if cc_mode == 'scc' or cc_mode == 'gcc':
        nc = inv_mtx.shape[1]
        ncc = inv_mtx.shape[-1]
        if cc_mode == 'scc':
            try:
                data = inv_mtx[0] @ data
            except Exception:
                print('error during inv_mtx @ data')
                print('mdh flags:', mdh_def.get_active_flags(mdh))
                print('data shape: ', data.shape)
                print('inv_mtx shape: ', inv_mtx.shape)
        else:  # 'gcc'
            data, x_in_timedomain = to_freqdomain(data, x_in_timedomain)
            # pad missing channels in data with zeros
            data = np.pad(data, [(0, nc - ncc), (0, 0)])
            for x in range(nx):
                data[:, x] = inv_mtx[x] @ data[:ncc, x]
    elif cc_mode == 'scc_bart' or cc_mode == 'gcc_bart':
        with suppress_stdout_stderr():
            # BART data format: [nx,ny,nz,nc]
            data = np.expand_dims(np.expand_dims(np.swapaxes(data, 0, 1), 1),
                                  1)
            if cc_mode == 'scc_bart':
                data = bart.bart(1, 'ccapply -S -u', data, inv_mtx)
            else:  # 'gcc_bart'
                data = bart.bart(1, 'ccapply -G -u', data, inv_mtx)
            data = np.swapaxes(np.squeeze(data), 0, 1)

    if reflect_data:
        data = data[:, ::-1]

    if remove_os:
        data, x_in_timedomain = to_freqdomain(data, x_in_timedomain)
        data = np.insert(data, nx // 2, np.zeros((nx, 1), dtype=data.dtype),
                         -1)

    data, x_in_timedomain = to_timedomain(data, x_in_timedomain)

    return np.complex64(data)
Example #17
'''Simple example showing proof of concept.'''

# import numpy as np
import matplotlib.pyplot as plt
from bart import bart  # pylint: disable=E0401

from virtcoilphase import virtcoilphase

if __name__ == '__main__':

    imspace = bart(1, 'phantom -x64 -s8').squeeze()
    phi_hat = virtcoilphase(imspace)

    plt.imshow(phi_hat)
    plt.show()
Example #18
from os.path import isfile

import numpy as np
from skimage.restoration import unwrap_phase
from bart import bart  # pylint: disable=E0401

from mr_utils import view
from mr_utils.recon.ssfp import gs_recon

if __name__ == '__main__':

    # Find the data
    path = '/home/nicholas/Documents/rawdata/GSFIELDMAP/'
    file = 'phantom_simple.npy'
    if isfile(path + file):
        im = np.load(path + file)
    else:
        rawname = 'meas_MID69_TRUFI_NBPM_2019_03_22_FID41524.dat'
        data = bart(1, 'twixread -A %s' % (path + rawname))
        print(data.shape)
        data = np.mean(data, axis=-1)
        im = np.fft.fftshift(np.fft.ifft2(
            data, axes=(0, 1)), axes=(0, 1))
        np.save(path + file, im)
    print(im.shape)

    # Get some coil sensitivity maps
    csmfile = 'csm_simple.npy'
    if isfile(path + csmfile):
        csm = np.load(path + csmfile)
    else:
        # kspace = np.fft.fftshift(np.fft.fft2(
        #     np.fft.fftshift(im, axes=(0, 1)), axes=(0, 1)),
        #     axes=(0, 1))
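The example is truncated here; a hedged continuation, following the commented lines above, could estimate the maps with BART's ecalib and cache them:

        # csm = bart(1, 'ecalib -m1', kspace).squeeze()
        # np.save(path + csmfile, csm)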
Example #19
'''Do PARS using BART stuff.'''

from time import time

import numpy as np
import matplotlib.pyplot as plt
from bart import bart  # pylint: disable=E0401

from pygrappa import pars
from utils import gridder

if __name__ == '__main__':

    sx, spokes, nc = 256, 256, 8
    traj = bart(1, 'traj -r -x%d -y%d' % (sx, spokes))
    kx, ky = traj[0, ...].real.flatten(), traj[1, ...].real.flatten()

    # Use BART to get Shepp-Logan and sensitivity maps
    t0 = time()
    k = bart(1, 'phantom -k -s%d -t' % nc, traj).reshape((-1, nc))
    print('Took %g seconds to simulate %d coils' % (time() - t0, nc))
    sens = bart(1, 'phantom -S%d -x%d' % (nc, sx)).squeeze()

    # Undersample
    ku = k.copy()
    ku[::2] = 0

    # Take a looksie
    sos = lambda x0: np.sqrt(np.sum(np.abs(x0)**2, axis=-1))
    plt.subplot(1, 3, 1)
    plt.imshow(sos(gridder(kx, ky, k, sx, sx)))
Example #20
def process_raw(group, config, metadata):
    # Create folder, if necessary
    if not os.path.exists(debugFolder):
        os.makedirs(debugFolder)
        logging.debug("Created folder " + debugFolder +
                      " for debug output files")

    # Format data into single [cha PE RO phs] array
    lin = [acquisition.idx.kspace_encode_step_1 for acquisition in group]
    phs = [acquisition.idx.phase for acquisition in group]

    # Use the zero-padded matrix size
    data = np.zeros(
        (group[0].data.shape[0],
         metadata.encoding[0].encodedSpace.matrixSize.y,
         metadata.encoding[0].encodedSpace.matrixSize.x, max(phs) + 1),
        group[0].data.dtype)

    rawHead = [None] * (max(phs) + 1)

    for acq, lin, phs in zip(group, lin, phs):
        if (lin < data.shape[1]) and (phs < data.shape[3]):
            # TODO: Account for asymmetric echo in a better way
            data[:, lin, -acq.data.shape[1]:, phs] = acq.data

            # center line of k-space is encoded in user[5]
            if (rawHead[phs] is
                    None) or (np.abs(acq.getHead().idx.kspace_encode_step_1 -
                                     acq.getHead().idx.user[5]) <
                              np.abs(rawHead[phs].idx.kspace_encode_step_1 -
                                     rawHead[phs].idx.user[5])):
                rawHead[phs] = acq.getHead()

    # Flip matrix in RO/PE to be consistent with ICE
    data = np.flip(data, (1, 2))

    # Format as [row col phs cha] for BART
    data = data.transpose((1, 2, 3, 0))

    logging.debug("Raw data is size %s" % (data.shape, ))
    np.save(debugFolder + "/" + "raw.npy", data)

    # Fourier Transform with BART
    logging.info("Calling BART FFT")
    data = bart(1, 'fft -u -i 3', data)

    # Re-format as [cha row col phs]
    data = data.transpose((3, 0, 1, 2))

    # Sum of squares coil combination
    # Data will be [PE RO phs]
    data = np.abs(data)
    data = np.square(data)
    data = np.sum(data, axis=0)
    data = np.sqrt(data)
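    # (equivalently in one step: data = np.sqrt(np.sum(np.abs(data) ** 2, axis=0)))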

    logging.debug("Image data is size %s" % (data.shape, ))
    np.save(debugFolder + "/" + "img.npy", data)

    # Normalize and convert to int16
    data *= 32767 / data.max()
    data = np.around(data)
    data = data.astype(np.int16)

    # Remove readout oversampling
    offset = int(
        (data.shape[1] - metadata.encoding[0].reconSpace.matrixSize.x) / 2)
    data = data[:,
                offset:offset + metadata.encoding[0].reconSpace.matrixSize.x]

    # Remove phase oversampling
    offset = int(
        (data.shape[0] - metadata.encoding[0].reconSpace.matrixSize.y) / 2)
    data = data[offset:offset +
                metadata.encoding[0].reconSpace.matrixSize.y, :]

    logging.debug("Image without oversampling is size %s" % (data.shape, ))
    np.save(debugFolder + "/" + "imgCrop.npy", data)

    # Format as ISMRMRD image data
    imagesOut = []
    for phs in range(data.shape[2]):
        # Create new MRD instance for the processed image
        # NOTE: from_array() takes input data as [x y z coil], which is
        # different than the internal representation in the "data" field as
        # [coil z y x], so we need to transpose
        tmpImg = ismrmrd.Image.from_array(data[..., phs].transpose())

        # Set the header information
        tmpImg.setHead(
            mrdhelper.update_img_header_from_raw(tmpImg.getHead(),
                                                 rawHead[phs]))
        tmpImg.field_of_view = (
            ctypes.c_float(metadata.encoding[0].reconSpace.fieldOfView_mm.x),
            ctypes.c_float(metadata.encoding[0].reconSpace.fieldOfView_mm.y),
            ctypes.c_float(metadata.encoding[0].reconSpace.fieldOfView_mm.z))
        tmpImg.image_index = phs

        # Set ISMRMRD Meta Attributes
        tmpMeta = ismrmrd.Meta()
        tmpMeta['DataRole'] = 'Image'
        tmpMeta['ImageProcessingHistory'] = ['PYTHON', 'BART']
        tmpMeta['WindowCenter'] = '16384'
        tmpMeta['WindowWidth'] = '32768'
        tmpMeta['Keep_image_geometry'] = 1

        # Add image orientation directions to MetaAttributes if not already present
        if tmpMeta.get('ImageRowDir') is None:
            tmpMeta['ImageRowDir'] = [
                "{:.18f}".format(tmpImg.getHead().read_dir[0]),
                "{:.18f}".format(tmpImg.getHead().read_dir[1]),
                "{:.18f}".format(tmpImg.getHead().read_dir[2])
            ]

        if tmpMeta.get('ImageColumnDir') is None:
            tmpMeta['ImageColumnDir'] = [
                "{:.18f}".format(tmpImg.getHead().phase_dir[0]),
                "{:.18f}".format(tmpImg.getHead().phase_dir[1]),
                "{:.18f}".format(tmpImg.getHead().phase_dir[2])
            ]

        xml = tmpMeta.serialize()
        logging.debug("Image MetaAttributes: %s", xml)
        tmpImg.attribute_string = xml
        imagesOut.append(tmpImg)

    return imagesOut
Example #21
def main():
    # ARGS
    input_data_path = '/mnt/dense/data_public/fastMRI/multicoil_val'
    output_data_path = '/mnt/raid3/sandino/fastMRI/validate_full'
    center_fraction = 0.04  # number of k-space lines used to do ESPIRiT calib
    num_emaps = 1
    dbwrite = False

    input_files = glob.glob(os.path.join(input_data_path, '*.h5'))

    for file in input_files:
        # Load HDF5 file
        hf = h5py.File(file, 'r')
        # existing keys: ['ismrmrd_header', 'kspace', 'reconstruction_rss']

        # load k-space and image data from HDF5 file
        kspace_orig = hf['kspace'][()]
        im_rss = hf['reconstruction_rss'][()]  # (33, 320, 320)

        # get data dimensions
        num_slices, num_coils, num_kx, num_ky = kspace_orig.shape
        xres, yres = im_rss.shape[1:3]  # matrix size
        num_low_freqs = int(round(center_fraction * yres))

        # allocate memory for new arrays
        im_shape = (xres, yres)
        kspace = np.zeros((num_slices, xres, yres, num_coils),
                          dtype=np.complex64)
        maps = np.zeros((num_slices, xres, yres, num_coils, num_emaps),
                        dtype=np.complex64)
        im_truth = np.zeros((num_slices, xres, yres, num_emaps),
                            dtype=np.complex64)

        for sl in range(num_slices):
            kspace_slice = np.transpose(kspace_orig[sl], axes=[1, 2, 0])
            kspace_slice = kspace_slice[:, :, None, :]

            # Data dimensions for BART:
            #  kspace - (kx, ky, 1, coils)
            #  maps - (kx, ky, 1, coils, emaps)
            # Data dimensions for PyTorch:
            #  kspace - (1, kx, ky, coils, real/imag)
            #  maps   - (1, kx, ky, coils, emaps, real/imag)

            # Pre-process k-space data (PyTorch)
            kspace_tensor = cplx.to_tensor(
                np.transpose(kspace_slice, axes=[2, 0, 1,
                                                 3]))  # (1, 640, 372, 15, 2)
            image_tensor = T.ifft2(kspace_tensor)
            print(image_tensor.size())
            image_tensor = cplx.center_crop(image_tensor, im_shape)
            kspace_tensor = T.fft2(image_tensor)
            kspace_slice = np.transpose(cplx.to_numpy(kspace_tensor),
                                        axes=[1, 2, 0, 3])

            # Compute sensitivity maps (BART)
            maps_slice = bart.bart(
                1, f'ecalib -d 0 -m {num_emaps} -c 0.1 -r {num_low_freqs}',
                kspace_slice)
            maps_slice = np.reshape(maps_slice,
                                    (xres, yres, 1, num_coils, num_emaps))
            maps_tensor = cplx.to_tensor(
                np.transpose(maps_slice, axes=[2, 0, 1, 3, 4]))

            # Do coil combination using sensitivity maps (PyTorch)
            A = T.SenseModel(maps_tensor)
            im_tensor = A(kspace_tensor, adjoint=True)

            # Convert image tensor to numpy array
            im_slice = cplx.to_numpy(im_tensor)

            # Re-shape and save everything
            kspace[sl] = np.reshape(kspace_slice, (xres, yres, num_coils))
            maps[sl] = np.reshape(maps_slice,
                                  (xres, yres, num_coils, num_emaps))
            im_truth[sl] = np.reshape(im_slice, (xres, yres, num_emaps))

        # write out new hdf5
        file_new = os.path.join(output_data_path, os.path.split(file)[-1])
        with h5py.File(file_new, 'w') as hf_new:
            # create datasets within HDF5
            hf_new.create_dataset('kspace', data=kspace)
            hf_new.create_dataset('maps', data=maps)
            hf_new.create_dataset('reconstruction_espirit', data=im_truth)
            hf_new.create_dataset('reconstruction_rss',
                                  data=im_rss)  # provided by fastMRI
            hf_new.create_dataset('ismrmrd_header', data=hf['ismrmrd_header'])

            # create attributes (metadata)
            for key in hf.attrs.keys():
                hf_new.attrs[key] = hf.attrs[key]

        if dbwrite:
            hf_new = h5py.File(file_new, 'r')
            print('Keys:', list(hf_new.keys()))
            print('Attrs:', dict(hf_new.attrs))
            cfl.writecfl('/home/sandino/maps', hf_new['maps'][()])
            cfl.writecfl('/home/sandino/kspace', hf_new['kspace'][()])
            cfl.writecfl('/home/sandino/im_truth',
                         hf_new['reconstruction_rss'][()])
            cfl.writecfl('/home/sandino/im_recon',
                         hf_new['reconstruction_espirit'][()])
Example #22
trajectory = h5_dataset.get(h5_dataset_trajectory_name)[()]
rawdata = h5_dataset.get(h5_dataset_rawdata_name)[()]

[dummy, nFE, nSpokes, nCh] = rawdata.shape

#%% Display rawdata and trajectory
plt.figure(1)
plt.imshow(np.log(1 + np.abs(rawdata[0, :, :, 0])), cmap="gray")
plt.axis('off')
plt.title('rawdata coil 1')

#%% Subsample
#R = 2
#trajectory = trajectory[:,:,1::R]
#rawdata = rawdata[:,:,1::R,:]
#[dummy,nFE,nSpokes,nCh] = rawdata.shape

#%%  Demo: NUFFT reconstruction with BART
# inverse gridding
img_igrid = bart(1, 'nufft -i -t', trajectory, rawdata)

# channel combination
img_igrid_sos = bart(1, 'rss 8', img_igrid)
img_igrid_sos = np.abs(img_igrid_sos)

#%% Display results
plt.figure(2)
plt.imshow(np.fliplr(np.flipud(img_igrid_sos)), cmap="gray")
plt.axis('off')
plt.title('Regridding SOS reconstruction')
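For reference, BART's rss argument is a dimension bitmask, so 'rss 8' (= 1 << 3) combines along dimension 3, the coil dimension in BART's [x, y, z, coil] layout. The NumPy equivalent of the channel combination above:

# img_igrid_sos = np.sqrt(np.sum(np.abs(img_igrid) ** 2, axis=3))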
        ##Prognostic
        prog_cate = prognostic.prognostic_cv('Y', 'T', df_data)

        err_prog = [
            np.nanmean(
                list(
                    np.array(list(np.abs(t_true - prog_cate['avg.CATE']))) /
                    ate_true))
        ]
        label_prog = ['Prognostic Score' for i in range(len(err_prog))]
        print('Prognostic ' + str(np.mean(err_prog)))

        #----------------------------------------------------------------------------------------------
        ##DBARTS
        try:
            bart_cate = bart.bart('Y', 'T', df_data, n_splits=5)

            err_bart = [
                np.nanmean(
                    list(np.abs(bart_cate['avg.CATE'] - t_true) / ate_true))
            ]
            label_bart = ['BART' for i in range(len(err_bart))]
        except Exception:
            err_bart = [np.NaN]
            label_bart = ['BART' for i in range(len(err_bart))]

        #----------------------------------------------------------------------------------------------
        ##Causal Forest
        crf_cate = causalforest.causalforest('Y', 'T', df_data, n_splits=5)

        err_crf = [
rcomb = np.sum(k2i(K)*np.conj(sMaps[:,:,:,:,None,None,None]),3)/sos
regFactor = np.max(np.abs(rcomb.flatten()))
print('scaling Factor for recon: ', regFactor)

#set up Recon
regWeight = 0.012 #lambda in Cost function
blk = 16 #block size for locally low rank recon
bart_string = 'pics -u1 -w 1 -H -d5 -e -i 80 -R L:7:7:%.2e -b %d' % (regFactor*regWeight,blk)

#loop through velocity encodings and perform LLR recon for each of them
szIm = np.shape(K[:,:,:,0,:,:,:]) #image dimensions
I = np.zeros(szIm,dtype=np.complex64)
for i in range(np.size(K,5)):
    # perform recon with BART; BART expects a fixed dimension ordering, so we
    # add singleton dimensions to shift the dynamics to positions that BART
    # does not interpret in a specific way
    tmp = bart.bart(1,bart_string,K[:,:,:,:,None,None,None,None,:,i,:],sMaps)
    print('I.shape', I.shape, 'tmp.shape ', tmp.shape)
    I[:,:,:,:,i,:] = np.squeeze(tmp) 

#store recon results
sio.savemat(os.path.join(sys.path[0], "img.mat"),{'I':I})

I = I[:,:,:,:,:,0] #take expiratory data only

I = I[:,:,11,None,5,None,:] #take data from systolic frame and single slice to reduce time for Bayes unfolding

#
venc = np.array([0.5, 1.5])
kv_rec = np.array([0, np.pi / venc[0], np.pi / venc[1]])  # .transpose()
#kv_rec = kv_rec[:, None]
kv_rec = np.tile(kv_rec[:, None], (1, 3))
Example #25
def bart_nufft(x0):
    return bart(1, 'nufft -i -t -d %d:%d:1' % (sx, sx), traj,
                x0.reshape((1, sx, spokes, nc))).squeeze()