Example #1
def CT(pix, phantom, angles, src_rad, noise, Exp_bin, bin_param, f_load_path,
       g_load_path):
    voxels = [pix, pix, pix]
    det_rad = 0
    if g_load_path is not None and f_load_path is not None:
        data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad, 
                               det_rad, load_data_g=g_load_path, 
                               load_data_f=f_load_path)
    elif g_load_path is not None:
        data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad, 
                               det_rad, load_data_g=g_load_path)
    elif f_load_path is not None:
        data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad, 
                               det_rad, load_data_f=f_load_path)
    else:
        data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad, 
                               det_rad)

  
    # %% Create the circular cone beam CT class
    CT_obj = ddf.CCB_CT(data_obj)

    CT_obj.init_algo()
    CT_obj.init_DDF_FDK(bin_param, Exp_bin)
    return CT_obj
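A minimal usage sketch for the helper above, assuming ddf refers to the ddf_fdk package imported elsewhere in the module; all argument values are illustrative, and the Ram-Lak reconstruction call mirrors the one in Example #6:

# Hypothetical call; every value below is for illustration only.
CT_obj = CT(pix=256, phantom='Fourshape', angles=360, src_rad=10,
            noise=['Poisson', 2**10], Exp_bin='linear', bin_param=2,
            f_load_path=None, g_load_path=None)
rec = CT_obj.FDK.do('Ram-Lak', compute_results='no')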
Example #2
def CT(pix, phantom, angles, src_rad, noise, nTrain, nTD, nVal, nVD,
       Exp_bin, bin_param, f_load_path, g_load_path):

    voxels = [pix, pix, pix]
    det_rad = 0
    if g_load_path is not None:
        if f_load_path is not None:
            data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad,
                                   det_rad, load_data_g=g_load_path,
                                   load_data_f=f_load_path)
        else:
            data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad,
                                   det_rad, load_data_g=g_load_path)
    else:
        data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad,
                               det_rad)

    CT_obj = ddf.CCB_CT(data_obj)
    CT_obj.init_algo()
    spf_space, Exp_op = ddf.support_functions.ExpOp_builder(bin_param,
                                                         CT_obj.filter_space,
                                                         interp=Exp_bin)
    # Create the FDK binned operator
    CT_obj.FDK_bin_nn = CT_obj.FDK_op * Exp_op

    # Create the NN-FDK object
    CT_obj.NNFDK = nn.NNFDK_class(CT_obj, nTrain, nTD, nVal, nVD, Exp_bin,
                                   Exp_op, bin_param)
    CT_obj.rec_methods += [CT_obj.NNFDK]
    return CT_obj
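A sketch of how the returned object might be used; the training and reconstruction calls mirror those in Example #11, and all argument values are illustrative:

# Hypothetical usage; values are illustrative.
case = CT(pix=1024, phantom='Defrise', angles=360, src_rad=2, noise=None,
          nTrain=1e6, nTD=10, nVal=1e6, nVD=5, Exp_bin='linear', bin_param=2,
          f_load_path=None, g_load_path=None)
case.NNFDK.train(4)   # as in Example #11
case.NNFDK.do()       # compute the NN-FDK reconstruction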
Example #3
def CT(pix, phantom, angles, src_rad, noise):
    voxels = [pix, pix, pix]
    det_rad = 0
    data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad, det_rad)

    CT_obj = ddf.CCB_CT(data_obj)
    CT_obj.init_algo()
    return CT_obj
Example #4
def CT(pix, phantom, angles, src_rad, noise, Exp_bin, bin_param, f_load_path,
       g_load_path):
    voxels = [pix, pix, pix]
    det_rad = 0
    data_obj = ddf.phantom(voxels,
                           phantom,
                           angles,
                           noise,
                           src_rad,
                           det_rad,
                           samp_fac=1)

    # %% Create the circular cone beam CT class
    CT_obj = ddf.CCB_CT(data_obj)

    CT_obj.init_algo()
    CT_obj.init_DDF_FDK(bin_param, Exp_bin)
    return CT_obj
Example #5
def Create_dataset(pix, phantom, angles, src_rad, noise, Exp_bin, bin_param):
    if phantom == 'Defrise':
        phantom = 'Defrise random'
    if phantom == 'Fourshape_test':
        phantom = 'Fourshape'
    # Maximum number of voxels considered per dataset
    MaxVoxDataset = np.max([int(pix**3 * 0.005), 1 * 10**6])

    # The size of the measured objects in voxels
    voxels = [pix, pix, pix]
    data_obj = ddf.phantom(voxels, phantom)
    det_rad = 0

    case = ddf.CCB_CT(data_obj, angles, src_rad, det_rad, noise)
    # Initialize the algorithms (FDK, SIRT)
    case.init_algo()
    # Create a binned filter space and an expansion operator
    spf_space, Exp_op = ddf.support_functions.ExpOp_builder(bin_param,
                                                            case.filter_space,
                                                            interp=Exp_bin)
    # Create FDK operator that takes binned filters
    FDK_bin_nn = case.FDK_op * Exp_op

    # Create a sampling operator
    S = SamplingOp(case.reco_space, MaxVoxDataset, case.WV_path)

    # Create the Operator related to the learning matrix
    B = S * FDK_bin_nn
    # Compute the learning matrix
    Bmat = odl.operator.oputils.matrix_representation(B)

    # Create the target data
    v_gt = np.asarray(S(case.phantom.f))
    Data = np.concatenate((Bmat, v_gt[:, None]), 1)

    return Data
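A sketch of how the resulting array might be consumed; the split below follows the concatenation at the end of the function, and the argument values and output file name are illustrative:

# Hypothetical usage; values and the output file name are illustrative.
Data = Create_dataset(pix=256, phantom='Fourshape', angles=360, src_rad=10,
                      noise=['Poisson', 2**10], Exp_bin='linear', bin_param=2)
B_mat, v_gt = Data[:, :-1], Data[:, -1]   # learning matrix and target column
np.save('Dataset0.npy', Data)             # hypothetical file name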
Example #6
#g_load_path = lp + 'CS_A64_g.npy'
noise = ['Poisson', 2**10]

# The number of projection angles in the measurements
angles = 360
# Source to center of rotation radius
src_rad = 10
det_rad = 0
# Variables above are expressed in the physical size of the measured object

# Noise model
# Options: None,  ['Gaussian', %intensity], ['Poisson', I_0], ['loaded data',
#                    filename]
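# For illustration only, the alternative settings below are not part of this
# script:
#   noise = None                        # noise-free projections
#   noise = ['Gaussian', 1]             # Gaussian noise, intensity in percent
#   noise = ['Poisson', 2**10]          # Poisson noise with photon count I_0
#   noise = ['loaded data', 'g.npy']    # hypothetical pre-computed noise file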

# Create a data object
data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad,
                       det_rad)  #, load_data=f_load_path)

# Expansion operator and binning parameter
expansion_op = 'linear'
bin_param = 2

# %% Create the circular cone beam CT class
case = ddf.CCB_CT(data_obj)  #,
##                  load_data=g_load_path)
## Initialize the algorithms (FDK, SIRT)
case.init_algo()
case.init_DDF_FDK()
# %%
rec_RL = case.FDK.do('Ram-Lak', compute_results='no')
rec_RL_B2 = case.FDK.filt_LP('Shepp-Logan', ['Bin', 2], compute_results='no')
rec_RL_B4 = case.FDK.filt_LP('Shepp-Logan', ['Bin', 4], compute_results='no')
Example #7
    LS[it, :] = rec[mid, mid, :]


def save_results(av_rec, CS, CS2, LS, path, meth):
    np.save(f'{path}_rec_{meth}', av_rec)
    np.save(f'{path}_CS_{meth}', CS)
    np.save(f'{path}_CS2_{meth}', CS2)
    np.save(f'{path}_LS_{meth}', LS)


# %%
for i in tqdm(range(nTests)):
    data_obj = ddf.phantom(voxels,
                           phantom,
                           angles,
                           noise,
                           src_rad,
                           det_rad,
                           samp_fac=1)

    ## %% Create the circular cone beam CT class
    case = ddf.CCB_CT(data_obj)  #
    ## Initialize the algorithms (FDK, SIRT)
    case.init_algo()
    case.init_DDF_FDK()
    case.TFDK.do(lam='optim')
    x = case.TFDK.results.var[-1]
    if i == 0:
        rec_RL = case.FDK.do('Ram-Lak', compute_results=False)
        rec_G8 = case.FDK.filt_LP('Shepp-Logan', ['Gauss', 8],
                                  compute_results=False)
Example #8
def main(pix, phantom, nTD, nTrain, nVD, nVal, train, bpath, stop_crit,
         PH, angles, src_rad, det_rad, noise, Exp_bin, bin_param):
    # Specific phantom

    
    # %%
    t1 = time.time()
    nn.Create_TrainingValidationData(pix, phantom, angles, src_rad, noise,
                                     Exp_bin, bin_param, nTD + nVD,
                                     base_path=bpath)
    print('Creating training and validation datasets took', time.time() - t1,
          'seconds')
    
    # %% Create a test phantom

    voxels = [pix, pix, pix]
    # Create a data object
    t2 = time.time()
    if not train:
        data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad,
                               det_rad)  #,
        #                       compute_xHQ=True)
        print('Making phantom and mask took', time.time() - t2, 'seconds')
        # The amount of projection angles in the measurements
        # Source to center of rotation radius
         
        t3 = time.time()
        # %% Create the circular cone beam CT class
        case = ddf.CCB_CT(data_obj)  #, angles, src_rad, det_rad, noise)
        print('Making data and operators took', time.time() - t3, 'seconds')
        # Initialize the algorithms (FDK, SIRT)
        t4 = time.time()
        case.init_algo()
        
        # %% Create NN-FDK algorithm setup
        # Create binned filter space and expansion operator
        spf_space, Exp_op = ddf.support_functions.ExpOp_builder(bin_param,
                                                             case.filter_space,
                                                             interp=Exp_bin)
        # Create the FDK binned operator
        case.FDK_bin_nn = case.FDK_op * Exp_op
        
        # Create the NN-FDK object
        case.NNFDK = nn.NNFDK_class(case, nTrain, nTD, nVal, nVD, Exp_bin,
                                    Exp_op, bin_param, base_path=bpath)
        case.rec_methods += [case.NNFDK]
        case.MSD = msd.MSD_class(case, case.NNFDK.data_path)
        case.rec_methods += [case.MSD]
        print('Initializing algorithms took', time.time() - t4, 'seconds')
    else:
        data_path = nn.make_data_path(pix, phantom, angles, src_rad, noise,
                                      Exp_bin, bin_param, base_path=bpath)
        MSD = msd.MSD_class(None, data_path)

    
    # %%

    
    l_tr, l_v = nn.Preprocess_datasets.random_lists(nTD, nVD)
    if nVD == 0:
        list_tr = [0]
        list_v = None
    elif nVD == 1:
        list_tr = [0]
        list_v = [1]
    else:
        list_tr = [i for i in range(10)]
        list_v = [i + 10 for i in range(5)]
        
    if train:
        print('Started training function')
        MSD.train(list_tr, list_v, stop_crit=stop_crit, ratio=3)
    
    else:
        case.MSD.add2sp_list(list_tr, list_v)
        case.MSD.do()
        # %%
        print('MSD rec time:', case.MSD.results.rec_time[0])
#        print('NNFDK rec time:', case.NNFDK.results.rec_time[0])
#        print('FDK rec time:', case.FDK.results.rec_time[0])
        # %%
        save_path = '/bigstore/lagerwer/NNFDK_results/figures/'
        pylab.close('all')
        case.table()
        case.show_phantom()
        case.MSD.show(save_name=f'{save_path}MSD_{PH}_nTD{nTD}_nVD{nVD}')
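A sketch of how this entry point might be invoked; every argument value is illustrative (stop_crit in particular is a placeholder), with the phantom-specific values borrowed from the similar driver in Example #11:

# Hypothetical invocation; all values are illustrative.
main(pix=1024, phantom='Fourshape_test', nTD=10, nTrain=1e6, nVD=5, nVal=1e6,
     train=False, bpath='/export/scratch2/lagerwer/data/NNFDK/', stop_crit=50,
     PH='4S', angles=360, src_rad=10, det_rad=0, noise=['Poisson', 2**8],
     Exp_bin='linear', bin_param=2)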
Example #9
                                 phantom,
                                 angles,
                                 src_rad,
                                 noise,
                                 Exp_bin,
                                 bin_param,
                                 nTD + nVD,
                                 base_path=bpath)
print('Creating training and validation datasets took',
      time.time() - t1, 'seconds')

# %% Create a test phantom
voxels = [pix, pix, pix]
# Create a data object
t2 = time.time()
data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad, det_rad)  #,
#                       compute_xHQ=True)
print('Making phantom and mask took', time.time() - t2, 'seconds')
# The amount of projection angles in the measurements
# Source to center of rotation radius

t3 = time.time()
# %% Create the circular cone beam CT class
case = ddf.CCB_CT(data_obj)  #, angles, src_rad, det_rad, noise)
print('Making data and operators took', time.time() - t3, 'seconds')
# Initialize the algorithms (FDK, SIRT)
t4 = time.time()
case.init_algo()

# %% Create NN-FDK algorithm setup
# Create binned filter space and expansion operator
Example #10
def Create_dataset_ASTRA_sim(pix, phantom, angles, src_rad, noise, Exp_bin,
                             bin_param, **kwargs):
    if phantom == 'Defrise':
        phantom = 'Defrise random'
    if phantom == 'Fourshape_test':
        phantom = 'Fourshape'

    if 'MaxVoxDataset' in kwargs:
        MaxVoxDataset = kwargs['MaxVoxDataset']

    else:
        MaxVoxDataset = np.max([int(pix**3 * 0.005), 1 * 10**6])

    # The size of the measured objects in voxels
    voxels = [pix, pix, pix]
    dpix = [voxels[0] * 2, voxels[1]]
    u, v = dpix
    # ! ! ! This will lead to some problems later on ! ! !
    det_rad = 0
    data_obj = ddf.phantom(voxels,
                           phantom,
                           angles,
                           noise,
                           src_rad,
                           det_rad,
                           compute_xHQ=True)
    WV_obj = ddf.support_functions.working_var_map()
    WV_path = WV_obj.WV_path
    data_obj.make_mask(WV_path)
    Smat = Make_Smat(voxels, MaxVoxDataset, WV_path)
    # %% saving tiffs for CNNs
    w_du = data_obj.w_detu
    filt = make_hann_filt(voxels, w_du)
    xFDK = ddf.FDK_ODL_astra_backend.FDK_astra(data_obj.g, filt,
                                               data_obj.geometry,
                                               data_obj.reco_space, None)

    # %% Create geometry
    # Make a circular scanning geometry
    minvox = data_obj.reco_space.min_pt[0]
    maxvox = data_obj.reco_space.max_pt[0]
    vox = np.shape(data_obj.reco_space)[0]
    vol_geom = astra.create_vol_geom(vox, vox, vox, minvox, maxvox, minvox,
                                     maxvox, minvox, maxvox)

    ang = np.linspace((1 / angles) * np.pi, (2 + 1 / angles) * np.pi, angles,
                      False)
    w_du, w_dv = 2 * data_obj.geometry.detector.partition.max_pt / [u, v]
    proj_geom = astra.create_proj_geom('cone', w_dv, w_du, v, u, ang,
                                       data_obj.geometry.src_radius,
                                       data_obj.geometry.det_radius)
    filter_part = odl.uniform_partition(-data_obj.detecsize[0],
                                        data_obj.detecsize[0], u)

    filter_space = odl.uniform_discr_frompartition(filter_part,
                                                   dtype='float64')
    spf_space, Exp_op = ddf.support_functions.ExpOp_builder(bin_param,
                                                            filter_space,
                                                            interp=Exp_bin)
    nParam = np.size(spf_space)

    fullFilterSize = int(2**(np.ceil(np.log2(dpix[0])) + 1))
    halfFilterSize = fullFilterSize // 2 + 1

    Resize_Op = odl.ResizingOperator(Exp_op.range, ran_shp=(fullFilterSize, ))
    # %% Create forward and backward projector
    #    project_id = astra.create_projector('cuda3d', proj_geom, vol_geom)
    #    W = astra.OpTomo(project_id)

    # %% Create data
    proj_data = np.transpose(np.asarray(data_obj.g), (2, 0, 1)).copy()
    #    W.FP(np.transpose(np.asarray(data_obj.f), (2, 1, 0)))

    # ! ! ! what is this? ! ! !
    # if noise is not None:
    #     g = add_poisson_noise(proj_data, noise[1])
    # else:
    g = proj_data

    proj_id = astra.data3d.link('-sino', proj_geom, g)

    rec = np.zeros(astra.geom_size(vol_geom), dtype=np.float32)
    rec_id = astra.data3d.link('-vol', vol_geom, rec)

    B = np.zeros((MaxVoxDataset, nParam + 1))

    # %% Make the matrix columns of the matrix B
    for nP in range(nParam):
        unit_vec = spf_space.zero()
        unit_vec[nP] = 1
        filt = Exp_op(unit_vec)

        rs_filt = Resize_Op(filt)

        f_filt = np.real(np.fft.rfft(np.fft.ifftshift(rs_filt)))
        filter2d = np.zeros((angles, halfFilterSize))
        for i in range(angles):
            filter2d[i, :] = f_filt * 4 * w_du

        # %% Make a filter geometry
        filter_geom = astra.create_proj_geom('parallel', w_du, halfFilterSize,
                                             np.zeros((angles)))

        filter_id = astra.data2d.create('-sino', filter_geom, filter2d)
        #

        cfg = astra.astra_dict('FDK_CUDA')
        cfg['ReconstructionDataId'] = rec_id
        cfg['ProjectionDataId'] = proj_id
        cfg['option'] = {'FilterSinogramId': filter_id}
        # Create the algorithm object from the configuration structure
        alg_id = astra.algorithm.create(cfg)

        # %%
        astra.algorithm.run(alg_id)
        rec = np.transpose(rec, (2, 1, 0))
        B[:, nP] = rec[Smat]
    # %%
    # Clean up. Note that GPU memory is tied up in the algorithm object,
    # and main RAM in the data objects.
    B[:, -1] = data_obj.xHQ[Smat]
    #    B[:, -1] = data_obj.f[Smat]
    astra.algorithm.delete(alg_id)
    astra.data3d.delete(rec_id)
    astra.data3d.delete(proj_id)
    return B, data_obj.xHQ, xFDK
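A sketch of a possible call; the argument values are illustrative, and the comment on the return values follows the assembly of B at the end of the function:

# Hypothetical usage; values are illustrative.
B, xHQ, xFDK = Create_dataset_ASTRA_sim(pix=256, phantom='Fourshape',
                                        angles=360, src_rad=10,
                                        noise=['Poisson', 2**10],
                                        Exp_bin='linear', bin_param=2,
                                        MaxVoxDataset=10**6)
# Each row of B holds the binned-filter reconstructions at one sampled voxel;
# the last column holds the corresponding high-quality reference value.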
Example #11
def main(phantom, nTD, nVD, train):
    pix = 1024
    # Specific phantom

    if phantom == 'Fourshape_test':
        PH = '4S'
        src_rad = 10
        noise = ['Poisson', 2**8]
    elif phantom == 'Defrise':
        PH = 'DF'
        src_rad = 2
        noise = None

    # Number of angles
    angles = 360
    # Detector radius
    det_rad = 0
    # Noise specifics

    # Number of voxels used for training, number of datasets used for training
    nTrain = 1e6
    # Number of voxels used for validation, number of datasets used for validation
    nVal = 1e6

    # Specifics for the expansion operator
    Exp_bin = 'linear'
    bin_param = 2
    bpath = '/export/scratch2/lagerwer/data/NNFDK/'

    # %%
    t1 = time.time()
    nn.Create_TrainingValidationData(pix,
                                     phantom,
                                     angles,
                                     src_rad,
                                     noise,
                                     Exp_bin,
                                     bin_param,
                                     nTD + nVD,
                                     base_path=bpath)
    print('Creating training and validation datasets took',
          time.time() - t1, 'seconds')

    # %% Create a test phantom
    voxels = [pix, pix, pix]
    # Create a data object
    t2 = time.time()
    data_obj = ddf.phantom(voxels, phantom, angles, noise, src_rad,
                           det_rad)  #,
    #                       compute_xHQ=True)
    print('Making phantom and mask took', time.time() - t2, 'seconds')
    # The amount of projection angles in the measurements
    # Source to center of rotation radius

    t3 = time.time()
    # %% Create the circular cone beam CT class
    case = ddf.CCB_CT(data_obj)  #, angles, src_rad, det_rad, noise)
    print('Making data and operators took', time.time() - t3, 'seconds')
    # Initialize the algorithms (FDK, SIRT)
    t4 = time.time()
    case.init_algo()

    # %% Create NN-FDK algorithm setup
    # Create binned filter space and expansion operator
    spf_space, Exp_op = ddf.support_functions.ExpOp_builder(bin_param,
                                                            case.filter_space,
                                                            interp=Exp_bin)
    # Create the FDK binned operator
    case.FDK_bin_nn = case.FDK_op * Exp_op

    # Create the NN-FDK object
    case.NNFDK = nn.NNFDK_class(case,
                                nTrain,
                                nTD,
                                nVal,
                                nVD,
                                Exp_bin,
                                Exp_op,
                                bin_param,
                                base_path=bpath)
    case.rec_methods += [case.NNFDK]
    print('Initializing algorithms took', time.time() - t4, 'seconds')

    # %%
    if not train:
        case.FDK.do('Hann')
        case.NNFDK.train(4)
        case.NNFDK.do()
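A sketch of how this driver might be invoked; the phantom name matches one of the branches handled at the top of the function, and the dataset counts are illustrative:

# Hypothetical invocation; dataset counts are illustrative.
main('Fourshape_test', nTD=10, nVD=5, train=False)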