Example #1
0
# Map the requested undersampling/anomaly level to the dataset version index.
if anomaly == '4x':
    dataset_version = 0
elif anomaly == '2x':
    dataset_version = 4
elif anomaly == '3x':
    dataset_version = 5
else:
    # Fail fast with a clear message: previously an unexpected value left
    # dataset_version unbound, causing a confusing NameError at the load call.
    raise ValueError('unsupported anomaly level: {}'.format(anomaly))

# Load the labeled MRI anomaly splits for the selected dataset version:
# normal train/val/test arrays plus anomalous train/test arrays.
# NOTE(review): both `normal` and `anomaly` are passed the same `test` value —
# presumably the two test sets share one size; confirm against the loader.
Xn_trn, Xn_val, Xn_tst, Xa_trn, Xa_tst = load_MRI_anomaly_labels(
    docker=docker,
    train=train,
    val=val,
    normal=test,
    anomaly=test,
    version=dataset_version)
print_red('Data 0-1 normalization ...')
# Rescale every split to the [0, 1] range, one assignment per array.
Xn_trn = normalize_0_1(Xn_trn)
Xn_val = normalize_0_1(Xn_val)
Xn_tst = normalize_0_1(Xn_tst)
Xa_trn = normalize_0_1(Xa_trn)
Xa_tst = normalize_0_1(Xa_tst)
## Dimension adjust: append a singleton channel axis for the conv net input
Xn_trn, Xn_val, Xn_tst, Xa_trn, Xa_tst = (
    np.expand_dims(arr, axis=3)
    for arr in (Xn_trn, Xn_val, Xn_tst, Xa_trn, Xa_tst))
print_red('Data ready!')

# create the graph
scope = 'base'
# Input placeholder: batch of single-channel img_size x img_size images.
x = tf.placeholder("float", shape=[None, img_size, img_size, 1])
# Batch-norm mode switch; defaults to inference unless explicitly fed True.
is_training = tf.placeholder_with_default(False, (), 'is_training')
# NOTE(review): the auto_encoder call below is truncated in this excerpt.
if version == 1 or version == 2:
    h1, h2, y = auto_encoder(x,
                             nb_cnn=nb_cnn,
                             bn=batch_norm,
Example #2
0
                                             normal=test,
                                             anomaly=test,
                                             version=version2)
# Load the anomalous test set for each remaining dataset version; only the
# fourth element of each returned 4-tuple is kept.
_sp_tests = []
for _ver in (version3, version4):
    _, _, _, _sp = load_MRI_anomaly(docker=docker,
                                    train=train,
                                    val=val,
                                    normal=test,
                                    anomaly=test,
                                    version=_ver)
    _sp_tests.append(_sp)
X_SP_tst2, X_SP_tst3 = _sp_tests
# Rescale every test set to the [0, 1] range.
X_SA_tst = normalize_0_1(X_SA_tst)
X_SP_tst1 = normalize_0_1(X_SP_tst1)
X_SP_tst2 = normalize_0_1(X_SP_tst2)
X_SP_tst3 = normalize_0_1(X_SP_tst3)
X_SP_tst = normalize_0_1(X_SP_tst)
## test data: all normalized test sets stacked along the sample axis
# (concatenation happens BEFORE the channel axis is added below)
Xt = np.concatenate([X_SA_tst, X_SP_tst1, X_SP_tst2, X_SP_tst3], axis=0)
X_SP_tst = np.expand_dims(X_SP_tst, axis=3)
#yt = np.concatenate([np.zeros((len(X_SA_tst),1)), np.ones((len(X_SP_tst1),1))], axis = 0).flatten()
## Dimension adjust: append a singleton channel axis
X_SA_tst, X_SP_tst1, X_SP_tst2, X_SP_tst3, Xt = (
    np.expand_dims(arr, axis=3)
    for arr in (X_SA_tst, X_SP_tst1, X_SP_tst2, X_SP_tst3, Xt))
print_red('Data Loaded !')

# Load the mixed-null 4x anomaly test set, scale it to [0, 1], and append a
# trailing singleton channel axis, as one pipeline.
Xa = np.expand_dims(
    normalize_0_1(load_MRI_anomaly_test(dataset='null_mixed_4x')),
    axis=3)
Example #3
0
	output_folder = '/data/results/MRI'
else:
	output_folder = './data/MRI'

## model folder: encode every hyper-parameter into the directory name
_name_parts = (version, os.path.basename(output_folder), nb_cnn, filters,
               kernel_size, batch_norm, lr, nb_steps, batch_size,
               int(train / 1000), val, test, loss)
model_name = 'AE{}-{}-cn-{}-fr-{}-ks-{}-bn-{}-lr-{}-stps-{}-bz-{}-tr-{}k-vl-{}-test-{}-l-{}'.format(*_name_parts)
model_folder = os.path.join(output_folder, model_name)
generate_folder(model_folder)

#image size
img_size = 256
## load dataset
print_red('Data loading ...')
X_SA_trn, X_SA_val, X_SA_tst, X_SP_tst = load_MRI_anomaly(
    docker=docker, train=train, val=val, normal=test, anomaly=test)
print_red('Data 0-1 normalization ...')
# NOTE(review): only the SA splits are normalized here; X_SP_tst keeps its
# original scale — matches the original flow, confirm this is intended.
X_SA_trn = normalize_0_1(X_SA_trn)
X_SA_val = normalize_0_1(X_SA_val)
X_SA_tst = normalize_0_1(X_SA_tst)
## Dimension adjust: append a singleton channel axis to every array
X_SA_trn, X_SA_val, X_SA_tst, X_SP_tst = (
    np.expand_dims(arr, axis=3)
    for arr in (X_SA_trn, X_SA_val, X_SA_tst, X_SP_tst))
print_red('Data ready!')

# create the graph
scope = 'base'
# Input placeholder: batch of single-channel img_size x img_size images.
x = tf.placeholder("float", shape=[None, img_size, img_size, 1])
# Batch-norm mode flag; defaults to inference unless explicitly fed True.
is_training = tf.placeholder_with_default(False, (), 'is_training')
# Build the requested auto-encoder variant.
if version in (1, 2):
    h1, h2, y = auto_encoder(x,
                             nb_cnn=nb_cnn,
                             bn=batch_norm,
                             bn_training=is_training,
                             filters=filters,
                             kernel_size=[kernel_size, kernel_size],
                             scope_name=scope)
elif version == 3:
    h1, h2, y = auto_encoder2(x,
                              nb_cnn=nb_cnn,
                              bn=batch_norm,
                              filters=filters,
                              kernel_size=[kernel_size, kernel_size],
                              scope_name=scope)

# create a saver
Example #4
0
    elif splits[i] == 'n' or splits[i] == 'NL':
        noise = float(splits[i + 1])
    elif splits[i] == 'l':
        loss = splits[i + 1]

model_folder = os.path.join(output_folder, model_name)
dim = 256
## load data: one call per dataset keeps each source explicit
print_red('Data loading ...')
Xn = load_MRI_anomaly_test(dataset='true')
Xa2 = load_MRI_anomaly_test(dataset='meas_2x')
Xa3 = load_MRI_anomaly_test(dataset='meas_3x')
Xa4 = load_MRI_anomaly_test(dataset='meas_4x')
Xam2 = load_MRI_anomaly_test(dataset='null_mixed_2x')
Xam4 = load_MRI_anomaly_test(dataset='null_mixed_4x')
# Rescale every set to the [0, 1] range.
Xn, Xa2, Xa3, Xa4, Xam2, Xam4 = [
    normalize_0_1(arr) for arr in (Xn, Xa2, Xa3, Xa4, Xam2, Xam4)]

# create a graph
scope = 'base'
# Input placeholder: batch of single-channel dim x dim images.
x = tf.placeholder("float", shape=[None, dim, dim, 1])
# Batch-norm mode switch; defaults to inference unless explicitly fed True.
is_training = tf.placeholder_with_default(False, (), 'is_training')

# NOTE(review): the auto_encoder call below is truncated in this excerpt.
if version == 1 or version == 2:
    h1, h2, y = auto_encoder(x,
                             nb_cnn=nb_cnn,
                             bn=batch_norm,
                             bn_training=is_training,
                             filters=filters,
                             kernel_size=[kernel_size, kernel_size],