Example #1
import numpy as np
import tensorflow as tf   # TensorFlow 1.x API (Session, meta-graph restore)

# cf (the project's helper module), data_dir, test_IM_name, test_label_name
# and AppCCIm_test are defined earlier in the original script.
model_dir = '../../data/Training_result/'
folder_name = 'ShallowNN_0'

############### loading parameters
model_var = np.load(model_dir + folder_name + '/var.npz')
pad_x = model_var['pad_x']
pad_y = model_var['pad_y']
pad_z = model_var['pad_z']

################### loading model
sess = tf.Session()
saver = tf.train.import_meta_graph(model_dir + folder_name + "/model.meta")
saver.restore(sess, model_dir + folder_name + "/model")
##################### recovering placeholders and output tensor by name
graph = tf.get_default_graph()
x = graph.get_tensor_by_name("x:0")
y_ = graph.get_tensor_by_name("y_:0")
keep_prob = graph.get_tensor_by_name("keep_prob:0")
y = graph.get_tensor_by_name("y:0")

################### loading test data
test_IM, test_label = cf.get_data(data_dir + test_IM_name,
                                  data_dir + test_label_name)

####################### testing
test_output = AppCCIm_test(sess, test_IM, pad_x, pad_y, pad_z)

####################### showing test result
cf.plot_result(test_IM, test_output, test_label)
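AppCCIm_test is not shown in this excerpt; judging from its arguments it feeds padded windows of test_IM through the restored network. As a minimal, hypothetical sketch, a single batch of flattened windows (window_batch, an assumed float32 array of shape (N, win_x * win_y * win_z)) would be evaluated like this, with dropout disabled at test time:

# Hypothetical sketch only; window_batch is an assumed array of flattened windows.
window_pred = sess.run(y, feed_dict={x: window_batch, keep_prob: 1.0})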
Example #2
valid_IM_name = 'image_4.tif'
valid_label_name = 'label_4.tif'

model_dir = '../../data/Training_result/'
folder_name = 'MultilayerNN_1'
cf.createFolder(model_dir + folder_name)

################### window size = output block plus symmetric padding
# out_x/out_y/out_z, pad_x/pad_y/pad_z, data_dir, train_IM_list and
# train_label_list are defined earlier in the original script.
win_x = out_x + 2 * pad_x
win_y = out_y + 2 * pad_y
win_z = out_z + 2 * pad_z
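For example, with out_x = out_y = out_z = 8 and pad_x = pad_y = pad_z = 4 (hypothetical values, not from the source), each input window would cover 16 x 16 x 16 voxels while the network predicts only the central 8 x 8 x 8 block.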

################### total number of training voxels across all images
IM_size_list = [None] * len(train_IM_list)
for i in range(len(train_IM_list)):
    phantom_IM, phantom_label = cf.get_data(data_dir + train_IM_list[i],
                                            data_dir + train_label_list[i])
    IM_size_list[i] = phantom_IM.size
N_total = sum(IM_size_list)
IM_ind = list(range(len(train_IM_list)))

################### creating network
# placeholders for the flattened input windows (x) and target output blocks (y_)
x = tf.placeholder(tf.float32, shape=[None, win_x * win_y * win_z], name="x")
y_ = tf.placeholder(tf.float32, shape=[None, out_x * out_y * out_z], name="y_")

# placeholder for dropout
keep_prob = tf.placeholder(tf.float32, name="keep_prob")
# first fully connected layer: 100 neurons with ReLU activation
W1 = weight_variable([win_x * win_y * win_z, 100])
b1 = bias_variable([100])
h1 = tf.nn.relu(tf.matmul(x, W1) + b1)
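weight_variable and bias_variable are not defined in this excerpt. A plausible sketch, assuming the standard TensorFlow 1.x tutorial-style initializers (the actual definitions in the project may differ):

def weight_variable(shape):
    # small truncated-normal weights to break symmetry (assumed initializer)
    return tf.Variable(tf.truncated_normal(shape, stddev=0.1))

def bias_variable(shape):
    # small positive constant so ReLU units start active (assumed initializer)
    return tf.Variable(tf.constant(0.1, shape=shape))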
Example #3
import pickle
import tensorflow as tf   # TensorFlow 1.x API

# num_data, MAX_NUM_DATA, validation_prop, test_prop, get_data and
# get_generator are defined earlier in the original script.
if num_data is None:
    num_data = MAX_NUM_DATA

with open('saves/ids/ids_shuffled.pkl', 'rb') as f:
    ids = pickle.load(f)

val_start = int((1 - (validation_prop + test_prop)) * num_data)
test_start = int((1 - test_prop) * num_data)
train_ids, val_ids, test_ids = (ids[:val_start],
                                ids[val_start:test_start],
                                ids[test_start:])
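For example, with num_data = 1000 and validation_prop = test_prop = 0.1 (hypothetical values), val_start = 800 and test_start = 900, so the shuffled ids are split 800/100/100 into training, validation and test sets.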

resolution, batch_size = 149, 32
network_dir_name = 'Inception-v3'

ram = True   # presumably: keep the whole dataset in memory rather than streaming from disk

names, labels, images, paths = get_data(resolution, ram, num_data)
data = (names, labels, images, paths)
num_data = len(names)

test_gen = get_generator(
    data,
    test_ids,
    resolution,
    batch_size,
    ram,
)
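get_generator is not shown in this excerpt either. A hypothetical sketch of a compatible batch generator, assuming the images are already held in RAM (the resolution and ram arguments are accepted but unused in this simplified version):

import numpy as np

def get_generator_sketch(data, ids, resolution, batch_size, ram=True):
    # Hypothetical stand-in for get_generator: yields (images, labels)
    # batches endlessly, Keras fit_generator style.
    names, labels, images, paths = data
    ids = list(ids)
    while True:
        np.random.shuffle(ids)
        for start in range(0, len(ids), batch_size):
            batch = ids[start:start + batch_size]
            yield (np.stack([images[i] for i in batch]),
                   np.stack([labels[i] for i in batch]))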

tf.reset_default_graph()

if tf.test.is_gpu_available():
    device = "/gpu:0"