Example 1
def prep_input(im, acc=4):
    """Undersample the batch, then reformat them into what the network accepts.

    Parameters
    ----------
    acc: int - acceleration factor that controls the undersampling rate;
               the higher the value, the more undersampling.
    """
    mask = cs.cartesian_mask(im.shape, acc, sample_n=8)
    im_und, k_und = cs.undersample(im, mask, centred=False, norm='ortho')
    im_gnd_l = to_lasagne_format(im)
    im_und_l = to_lasagne_format(im_und)
    k_und_l = to_lasagne_format(k_und)
    mask_l = to_lasagne_format(mask, mask=True)

    return im_und_l, k_und_l, mask_l, im_gnd_l
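A minimal usage sketch of prep_input on one small batch of complex-valued images. It assumes the cs module and to_lasagne_format used above are importable, and that to_lasagne_format stacks real and imaginary parts into a channel axis, which is consistent with the (batch_size, 2, Nx, Ny) input shape used in the later examples:

# Hypothetical usage of prep_input; cs and to_lasagne_format must be importable.
import numpy as np

batch_size, Nx, Ny = 4, 256, 256
im = (np.random.randn(batch_size, Nx, Ny)
      + 1j * np.random.randn(batch_size, Nx, Ny)).astype(np.complex64)

im_und_l, k_und_l, mask_l, im_gnd_l = prep_input(im, acc=4)
# Each *_l array is expected to have shape (batch_size, 2, Nx, Ny):
# one channel for the real part and one for the imaginary part.
print(im_und_l.shape, k_und_l.shape, mask_l.shape, im_gnd_l.shape)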
Example 2
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)

    # Specify network (d2_c2 or d5_c5)
    input_shape = (batch_size, 2, Nx, Ny)
    # net_config, net = build_d2_c2(input_shape)
    net_config, net = build_d5_c5(input_shape)

    # Optionally load D5-C5 with pre-trained parameters
    # (only applies when build_d5_c5 is selected above):
    # with np.load('./models/pretrained/d5_c5.npz') as f:
    #     param_values = [f['arr_{0}'.format(i)] for i in range(len(f.files))]
    #     lasagne.layers.set_all_param_values(net, param_values)

    # Compute acceleration rate
    dummy_mask = cs.cartesian_mask((10, Nx, Ny), acc, sample_n=8)
    sample_und_factor = cs.undersampling_rate(dummy_mask)
    print('Undersampling Rate: {:.2f}'.format(sample_und_factor))

    # Compile function
    train_fn, val_fn = compile_fn(net, net_config, args)


    # Create dataset
    # train, validate, test = create_dummy_data()
    train, validate, test = load_training_valid_test_data()

    print('Start Training...')
    for epoch in range(num_epoch):
        t_start = time.time()
        # Training
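The example stops right where the per-epoch work begins. A minimal sketch of what that inner training loop could look like, assuming an iterate_minibatch helper that yields batches from train and a train_fn(im_und, mask, k_und, im_gnd) argument order; both are assumptions for illustration, not shown in the snippet:

        # Hypothetical inner loop: iterate_minibatch and the argument order of
        # train_fn are assumptions, not part of the code above.
        train_err, train_batches = 0, 0
        for im in iterate_minibatch(train, batch_size, shuffle=True):
            im_und, k_und, mask, im_gnd = prep_input(im, acc)
            train_err += train_fn(im_und, mask, k_und, im_gnd)[0]
            train_batches += 1
        print('Epoch {}/{} - training loss {:.6f} ({:.1f}s)'.format(
            epoch + 1, num_epoch, train_err / train_batches,
            time.time() - t_start))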
Example 3
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)

    # Specify network
    input_shape = (batch_size, 2, Nx, Ny)
    net_config, net = build_d2_c2(input_shape)

    # Optionally build D5-C5 and load its pre-trained parameters instead:
    # net_config, net = build_d5_c5(input_shape)
    # with np.load('./models/pretrained/d5_c5.npz') as f:
    #     param_values = [f['arr_{0}'.format(i)] for i in range(len(f.files))]
    #     lasagne.layers.set_all_param_values(net, param_values)

    # Compute acceleration rate
    dummy_mask = cs.cartesian_mask((10, Nx, Ny), acc, sample_n=8)
    sample_und_factor = cs.undersampling_rate(dummy_mask)
    print('Undersampling Rate: {:.2f}'.format(sample_und_factor))

    # Compile function
    train_fn, val_fn = compile_fn(net, net_config, args)


    # Create dataset
    train, validate, test = create_dummy_data()

    print('Start Training...')
    for epoch in range(num_epoch):
        t_start = time.time()
        # Training
        train_err = 0
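create_dummy_data() itself is not shown in these examples. A purely illustrative stand-in that returns complex-valued train/validation/test arrays in the (n, Nx, Ny) layout the rest of the code expects; the sizes are arbitrary and this is not the original function:

import numpy as np

def create_dummy_data(n_train=100, n_val=20, n_test=20, Nx=128, Ny=128):
    """Illustrative stand-in: random complex-valued images split into three sets."""
    def _rand(n):
        return (np.random.randn(n, Nx, Ny)
                + 1j * np.random.randn(n, Nx, Ny)).astype(np.complex64)
    return _rand(n_train), _rand(n_val), _rand(n_test)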
Example 4
arr1 = sitk.GetArrayFromImage(img1)
arr2 = sitk.GetArrayFromImage(img2)
arr3 = sitk.GetArrayFromImage(img3)
arr4 = sitk.GetArrayFromImage(img4)
arr5 = sitk.GetArrayFromImage(img5)
arr6 = sitk.GetArrayFromImage(img6)
# combine the per-volume arrays into one stack
arr = np.concatenate((arr1, arr2, arr3, arr4, arr5, arr6), axis=0)
np.save('image_array', arr)

# FFT of the combined numpy array
fft_arr = np.fft.fft2(arr)
fshift_out = np.fft.fftshift(fft_arr)

#cartesian mask
mask_1 = cs.cartesian_mask(arr.shape, 4, sample_n=10, centred=False)
im_und, k_und = cs.undersample(arr, mask_1, centred=False, norm='ortho')

#normalising the input and output data
features_IMG = tf.keras.utils.normalize(im_und, axis=1)
x_train = np.expand_dims(features_IMG, 3)
print(np.shape(x_train))
features_KS = tf.keras.utils.normalize(k_und, axis=1)
x_ks_train = np.expand_dims(features_KS, 3)
Target = tf.keras.utils.normalize(arr, axis=1)
y_train = np.expand_dims(Target, 3)

mask_data = np.expand_dims(mask_1[1], 2)  # one mask slice, shape (Nx, Ny, 1)
mask_ksp = mask_data.astype(int)
inv_mask = 1 - mask_ksp  # complement of the binary mask (bitwise_not would give -1/-2)
print(np.shape(mask_data))
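The example above starts from already loaded SimpleITK images img1..img6. A minimal sketch of that earlier loading step, with hypothetical file names:

import SimpleITK as sitk

# Hypothetical file names; any format SimpleITK can read (e.g. NIfTI) will do.
paths = ['vol1.nii.gz', 'vol2.nii.gz', 'vol3.nii.gz',
         'vol4.nii.gz', 'vol5.nii.gz', 'vol6.nii.gz']
img1, img2, img3, img4, img5, img6 = [sitk.ReadImage(p) for p in paths]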
Example 5
    la = 0.001
    warm_start = 'off'
    warm_start_factor = 0
    perp_start = 'rand'
    perp_start_factor = 0.01
    momentum = 0.9
    learning_rate = 0.01
    verbose = True

    data = load_data()
    # batch_size = data.shape[0]
    Nt, Nx, Ny = data.shape
    input_shape = (batch_size, 2, Nx, Ny)

    mask = cs.cartesian_mask((batch_size, Nx, Ny),
                             undersampling_rate,
                             sample_n=8)

    #    mask = np.zeros([2,2,Nx,Ny]).astype('float32');
    #    mask[:,0,:,:]= dummy_mask;
    #    mask[:,1,:,:]= dummy_mask;
    #
    #    runner_id = 25;
    #    runner = load_runner(runner_id);
    #    fname = 'data/data_runner_%d_' % (runner_id);
    #    runner.export_data(fname);
    #
    #    mask = runner.mask[0];

    net_config, net = load_network(input_shape, network_path)
    val_fn, df = compile_functions(net, net_config)
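The commented-out block above hints at duplicating the same 2-D mask over both channels of the (batch, 2, Nx, Ny) input. A short equivalent using the mask built by cs.cartesian_mask, assumed here to have shape (batch_size, Nx, Ny):

    # Replicate the single-channel mask over the real/imaginary channel axis,
    # mirroring the commented-out mask[:, 0, :, :] / mask[:, 1, :, :] assignments.
    mask_2ch = np.repeat(mask[:, np.newaxis, :, :], 2, axis=1).astype('float32')
    assert mask_2ch.shape == (batch_size, 2, Nx, Ny)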
Example 6
    # Configure directory info
    project_root = '.'
    save_dir = join(project_root, 'models/%s' % model_name)
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)

    # Specify network
    input_shape = (batch_size, 2, Nx, Ny)
    net_config, net = build_UnetCascade(input_shape)
    print(net_config)
    # net_config, net = build_d2_c2(input_shape)

    # Compute acceleration rate
    dummy_mask = cs.cartesian_mask((500, Nx, Ny),
                                   gauss_ivar,
                                   sample_high_freq=True,
                                   sample_centre=True,
                                   sample_n=8)
    acc = dummy_mask.size / np.sum(dummy_mask)
    print('Acceleration Rate: {:.2f}'.format(acc))

    # Compile function
    train_fn, val_fn = compile_fn(net, net_config, args)

    # Create dataset
    train, validate, test = create_data_knee()
    print('data created')

    # Uncomment to resume from an already trained model:
    # name = '%s_epoch_%d.npz' % (model_name, 8550)
    # param = np.load(join(save_dir, name))
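The resume-from-checkpoint block stops after loading the .npz file. Completing it with the np.load + lasagne.layers.set_all_param_values pattern shown in the earlier examples, as a sketch kept commented out like the surrounding lines:

    # param_values = [param['arr_{0}'.format(i)] for i in range(len(param.files))]
    # lasagne.layers.set_all_param_values(net, param_values)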