def update_labels():
    # Generate the reference reconstruction (label volume) for every
    # projection file that does not have a matching volume file yet.
    geom, angles = projtable.read(DATA_P + 'projMat.txt')
    ref_reconstructor = ct.Reconstructor(CONF, angles, DISPLACEMENT,
                                         name='RefReconstructor')

    with tf.Session(config=tf.ConfigProto(gpu_options=GPU_OPTIONS)) as sess:
        sess.run(tf.global_variables_initializer())
        sess.run(tf.local_variables_initializer())

        for fn_proj in PROJ_FILES:
            fn_vol = fn_proj.replace('proj', 'vol')
            if not os.path.exists(fn_vol):
                print('Creating label for %s' % fn_proj)
                sess.run(create_label(fn_proj, fn_vol, ref_reconstructor, geom))
            VOL_FILES.append(fn_vol)

    # The with-block already closes the session; the explicit sess.close()
    # was redundant and has been dropped.
    tf.reset_default_graph()

    PROJ_FILES.sort()
    VOL_FILES.sort()
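# A minimal illustration (not part of the original code) of the filename
# pairing update_labels relies on: a plain substring replacement, which
# assumes 'proj' occurs only in the file name itself and not in any
# directory component, e.g.:
#
#   >>> 'phantoms/lowdose/binary0.proj.bin'.replace('proj', 'vol')
#   'phantoms/lowdose/binary0.vol.bin'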
def __init__(self, train_proj, train_vol, test_proj, test_vol, sess):
    self.train_proj_fns_init = tf.placeholder(tf.string, shape=(len(train_proj)))
    self.train_vol_fns_init = tf.placeholder(tf.string, shape=(len(train_vol)))
    self.test_proj_fns_init = tf.placeholder(tf.string, shape=(len(test_proj)))
    self.test_vol_fns_init = tf.placeholder(tf.string, shape=(len(test_vol)))

    self.train_proj_fns = tf.Variable(self.train_proj_fns_init, trainable=False, collections=[])
    self.train_vol_fns = tf.Variable(self.train_vol_fns_init, trainable=False, collections=[])
    self.test_proj_fns = tf.Variable(self.test_proj_fns_init, trainable=False, collections=[])
    self.test_vol_fns = tf.Variable(self.test_vol_fns_init, trainable=False, collections=[])

    sess.run(self.train_proj_fns.initializer, feed_dict={self.train_proj_fns_init: train_proj})
    sess.run(self.train_vol_fns.initializer, feed_dict={self.train_vol_fns_init: train_vol})
    sess.run(self.test_proj_fns.initializer, feed_dict={self.test_proj_fns_init: test_proj})
    sess.run(self.test_vol_fns.initializer, feed_dict={self.test_vol_fns_init: test_vol})

    geom, angles = projtable.read(DATA_P + 'projMat.txt')

    re = ct.Reconstructor(CONF_LA, angles[0:LIMITED_ANGLE_SIZE],
                          trainable=True, name='LAReconstructor',
                          weights_type=WEIGHTS_TYPE)
    geom_la = geom[0:LIMITED_ANGLE_SIZE]

    with tf.device("/cpu:0"):
        train, train_label, test, test_label = input_pipeline(
            self.train_proj_fns, self.train_vol_fns,
            self.test_proj_fns, self.test_vol_fns)

    self.test_label = test_label

    if not tf.train.get_global_step():
        tf.train.create_global_step()

    self.train_op = self.train_on_projections(train, train_label, re, geom_la)
    self.test_vol = re.apply(test, geom_la)

    with tf.device("/cpu:0"):
        self.test_loss = tf.losses.mean_squared_error(test_label, self.test_vol)
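# A minimal sketch (illustration only, TF 1.x; _filename_variable_demo is a
# hypothetical helper, not part of the original code) of the
# placeholder -> variable idiom used in __init__ above: feeding the filename
# list in through a placeholder keeps the constant out of the serialized
# graph, and collections=[] excludes the variable from
# tf.global_variables_initializer(), so it must be initialized explicitly
# with a feed, exactly as the sess.run(...initializer...) calls above do.
def _filename_variable_demo(sess):
    names_init = tf.placeholder(tf.string, shape=(2,))
    names = tf.Variable(names_init, trainable=False, collections=[])
    sess.run(names.initializer, feed_dict={names_init: ['a.bin', 'b.bin']})
    return names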
def minha_funcao_pessoal2():
    with tf.Session() as sess:
        train_writer = tf.summary.FileWriter('./logs/1/train', sess.graph)

        sets = split_train_validation_set(0)
        m = Model(*sets, sess)

        sess.run(tf.global_variables_initializer())
        sess.run(tf.local_variables_initializer())

        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)

        """
        write_png = png.writeSlice(
            m.train_proj[0],
            '/media/davi/526E10CC6E10AAAD/mestrado_davi/train/test_model_0/uma_projecao_do_train.png'
        )
        sess.run(write_png)

        #volume_la = sess.run(volume_la)
        write_png = png.writeSlice(
            volume_la[0],
            '/media/davi/526E10CC6E10AAAD/mestrado_davi/train/test_model_0/uma_slice_do_train.png'
        )
        sess.run(write_png)

        write_png = png.writeSlice(
            m.test_label[0],
            '/media/davi/526E10CC6E10AAAD/mestrado_davi/train/test_model_0/test_label_0.png'
        )
        sess.run(write_png)
        """

        optimizer = tf.train.GradientDescentOptimizer(LEARNING_RATE)

        # Build the loss and training ops once, outside the loop: creating
        # them per iteration grows the graph, and merge_all() returned None
        # on the first pass in the original version because no summary
        # existed yet at that point.
        volume_la = m.re.apply(m.train_proj, m.geom)
        t1 = tf.math.reduce_max(m.train_label)
        t2 = tf.math.reduce_max(volume_la)
        train_label_ = m.train_label * (254 / t1)
        volume_la_ = volume_la * (254 / t2)
        loss = tf.losses.mean_squared_error(train_label_, volume_la_)
        tf.summary.scalar("resultado_loss", loss)
        train_step = optimizer.minimize(loss)
        #train_step = tf.contrib.layers.optimize_loss(loss, tf.train.get_global_step(), learning_rate=LEARNING_RATE, optimizer='Adam', summaries=["gradients"])
        #train_step = tf.contrib.slim.learning.create_train_op(loss, optimizer, summarize_gradients=True)
        merge = tf.summary.merge_all()

        log_dir = "/home/davi/Documentos/train/test_model_0/"
        for i in range(100):
            resultado_loss, summary = sess.run([loss, merge])
            print(resultado_loss)

            sess.run(train_step)
            train_writer.add_summary(summary, i)

            write_png = png.writeSlice(m.test_vol[0],
                                       log_dir + 'slice_label_' + str(i) + '.png')
            sess.run(write_png)
            exit()  # debug early exit: only the first iteration ever runs

    # NOTE: unreachable while the exit() above is in place.
    save_path = '/home/davi/Documentos/train/model_%d/' % 0
    geom, angles = projtable.read(DATA_P + 'projMat.txt')
    reconstructor = ct.Reconstructor(CONF_LA, angles[0:15], DISPLACEMENT,
                                     trainable=True, name='LAReconstructor',
                                     weights_type=WEIGHTS_TYPE)
    volume_la = reconstructor.apply(train_proj, geom)
def my_train():
    train_proj = [
        "/home/davi/Documentos/ConeDeepLearningCT2/phantoms/lowdose/binary0.proj.bin",
    ]
    train_label = [
        "/home/davi/Documentos/ConeDeepLearningCT2/phantoms/lowdose/binary0.vol.bin",
    ]
    save_path = '/home/davi/Documentos/train/model_%d/' % 0

    # Read the raw Dennerlein files into memory.
    with tf.Session() as sess:
        train_list = []
        label_list = []
        for i in range(len(train_proj)):
            train_list.append(sess.run(dennerlein.read_noqueue(train_proj[i])))
            label_list.append(sess.run(dennerlein.read_noqueue(train_label[i])))
        geom, angles = projtable.read(DATA_P + 'projMat.txt')

    # OK: the samples are now in train_list and the labels in label_list.
    BATCH_SIZE = 1
    EPOCHS = 100000
    features, labels = (train_list, label_list)
    dataset = tf.data.Dataset.from_tensor_slices(
        (np.asarray(features), np.asarray(labels))).repeat().batch(BATCH_SIZE)
    iterator = dataset.make_one_shot_iterator()
    x, y = iterator.get_next()

    with tf.Session(config=tf.ConfigProto(gpu_options=GPU_OPTIONS)) as sess:
        global LEARNING_RATE

        # Fixed reconstructor, used only to write out a slice before training.
        re = ct.Reconstructor(CONF_LA, angles[0:LIMITED_ANGLE_SIZE],
                              DISPLACEMENT, name='LAReconstructor',
                              weights_type=WEIGHTS_TYPE)
        volume_la = re.apply(x, geom)

        # Trainable reconstructor, corrected over the training epochs.
        re = ct.Reconstructor(CONF_LA, angles[0:LIMITED_ANGLE_SIZE],
                              DISPLACEMENT, trainable=True,
                              name='LAReconstructor',
                              weights_type=WEIGHTS_TYPE)

        if not tf.train.get_global_step():
            tf.train.create_global_step()

        # Build the training ops once, outside the loop, so the graph does
        # not grow on every iteration.
        volume_train = re.apply(x, geom)
        volume_train = tf.expand_dims(volume_train, axis=0)  # add a batch dimension
        t1 = tf.math.reduce_max(y)
        t2 = tf.math.reduce_max(volume_train)
        label_ = y * (255 / t1)
        volume_train_ = volume_train * (255 / t2)
        loss = tf.losses.mean_squared_error(label_, volume_train_)
        gstep = tf.train.get_global_step()
        train_step = tf.train.GradientDescentOptimizer(LEARNING_RATE).minimize(
            loss, colocate_gradients_with_ops=True, global_step=gstep)

        sess.run(tf.global_variables_initializer())
        sess.run(tf.local_variables_initializer())

        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        saver = tf.train.Saver(max_to_keep=TRACK_LOSS)

        # Write one slice before training starts.
        write_png_before = png.writeSlice(
            volume_la[0],
            '/home/davi/Documentos/train/test_model_0/slice_qualquer_antes_treinamento.png'
        )
        sess.run(write_png_before)

        write_png_after = png.writeSlice(
            volume_train_[0][0],
            '/home/davi/Documentos/train/test_model_0/slice_qualquer_depois_treinamento.png'
        )

        try:
            for i in range(EPOCHS):
                # if i % 30 == 0:
                #     print("Lowering the learning rate")
                #     LEARNING_RATE /= 10

                sess.run(write_png_after)

                step = sess.run(gstep)
                if step % 10 == 0:  # was `if step % 10:`, which skipped every 10th step
                    print("Saving the model")
                    saver.save(sess, save_path + 'model', global_step=step)

                print("Training")
                sess.run(train_step)
        except tf.errors.OutOfRangeError:
            print('Done.')
        finally:
            coord.request_stop()
            coord.join(threads)
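# A self-contained sketch (illustration only, TF 1.x; _dataset_pattern_demo
# is a hypothetical helper, not part of the original code) of the in-memory
# input pattern my_train uses: numpy arrays -> Dataset -> one-shot iterator
# -> get_next(). The arrays here are synthetic stand-ins for the Dennerlein
# projection and volume data.
def _dataset_pattern_demo():
    features = np.zeros((4, 8, 8), dtype=np.float32)  # stand-in projections
    labels = np.ones((4, 8, 8), dtype=np.float32)     # stand-in volumes
    dataset = tf.data.Dataset.from_tensor_slices(
        (features, labels)).repeat().batch(1)
    x, y = dataset.make_one_shot_iterator().get_next()
    with tf.Session() as sess:
        bx, by = sess.run([x, y])
        print(bx.shape, by.shape)  # -> (1, 8, 8) (1, 8, 8)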
def __init__(self, train_proj, train_vol, test_proj, test_vol, sess):
    self.train_proj_fns_init = tf.placeholder(tf.string, shape=(len(train_proj)))
    self.train_vol_fns_init = tf.placeholder(tf.string, shape=(len(train_vol)))
    self.test_proj_fns_init = tf.placeholder(tf.string, shape=(len(test_proj)))
    self.test_vol_fns_init = tf.placeholder(tf.string, shape=(len(test_vol)))

    self.train_proj_fns = tf.Variable(self.train_proj_fns_init, trainable=False, collections=[])
    self.train_vol_fns = tf.Variable(self.train_vol_fns_init, trainable=False, collections=[])
    self.test_proj_fns = tf.Variable(self.test_proj_fns_init, trainable=False, collections=[])
    self.test_vol_fns = tf.Variable(self.test_vol_fns_init, trainable=False, collections=[])

    sess.run(self.train_proj_fns.initializer, feed_dict={self.train_proj_fns_init: train_proj})
    sess.run(self.train_vol_fns.initializer, feed_dict={self.train_vol_fns_init: train_vol})
    sess.run(self.test_proj_fns.initializer, feed_dict={self.test_proj_fns_init: test_proj})
    sess.run(self.test_vol_fns.initializer, feed_dict={self.test_vol_fns_init: test_vol})

    geom, angles = projtable.read(DATA_P + 'projMat.txt')
    self.geom = geom

    re = ct.Reconstructor(CONF_LA, angles[0:LIMITED_ANGLE_SIZE], DISPLACEMENT,
                          trainable=True, name='LAReconstructor',
                          weights_type=WEIGHTS_TYPE)
    geom_la = geom[0:LIMITED_ANGLE_SIZE]

    with tf.device("/cpu:0"):
        train, train_label, test, test_label = input_pipeline(
            self.train_proj_fns, self.train_vol_fns,
            self.test_proj_fns, self.test_vol_fns)

    self.test = test
    self.test_label = test_label
    self.train_proj = train
    self.train_label = train_label
    self.re = re
    # Keep the projections as an attribute so they can be written out as PNG
    # to check that the data is in the expected format.
    self.train_data = train

    if not tf.train.get_global_step():
        tf.train.create_global_step()

    self.train_op = self.train_on_projections(train, train_label, re, geom_la)
    self.test_vol = re.apply(test, geom_la)

    with tf.device("/cpu:0"):
        #self.test_loss = png.writeSlice(test_label[0], '/media/davi/526E10CC6E10AAAD/mestrado_davi/train/test_model_0/test_label__.png')
        #self.test_loss = tf.Print(test_label, [test_label], summarize=300000)

        # Normalize the values so the loss is not dominated by a difference
        # in intensity scale. (test_label_ / test_vol_ are currently unused
        # by the active loss below.)
        t1 = tf.math.reduce_max(test_label)
        t2 = tf.math.reduce_max(self.test_vol)
        #max = tf.math.reduce_max([t1, t2])
        test_label_ = test_label * (254 / t1)
        test_vol_ = self.test_vol * (254 / t2)

        # Earlier debugging experiments, kept for reference:
        # a = np.ones((1, 500, 500)) * 1000000000000
        # train_label_ = tf.add(train_label, a)
        # test_vol_ = tf.add(self.test_vol, a)
        # self.test_loss = tf.losses.mean_squared_error(train_label_, test_vol_)
        # t1 = test_label      #tf.slice(test_label, [0,0,225], [1,50,50])
        # t2 = self.test_vol   #tf.slice(self.test_vol, [0, 0, 225], [1,50,50])
        # test_label_ = tf.Print(t1, [tf.reduce_max(t1)], message="\n\nMax of TEST_LABEL: ", summarize=2500)
        # test_label_ = tf.Print(test_label_, [tf.reduce_min(t1)], message="Min of TEST_LABEL: ", summarize=2500)
        # test_label_ = tf.Print(test_label_, [tf.reduce_mean(t1)], message="Mean of TEST_LABEL: \n\n", summarize=2500)
        # test_vol_ = tf.Print(t2, [tf.reduce_max(t2)], message="Max of TEST_VOL: ", summarize=2500)
        # test_vol_ = tf.Print(test_vol_, [tf.reduce_min(t2)], message="Min of TEST_VOL: ", summarize=2500)
        # test_vol_ = tf.Print(test_vol_, [tf.reduce_mean(t2)], message="Mean of TEST_VOL: ", summarize=2500)
        # x_as_string = tf.map_fn(lambda xi: tf.strings.format('{}', xi), test_label_, dtype=tf.string)
        # test_label_ = tf.io.write_file("ARQUIVO_CONTENDO_O_TEST_LABEL.txt", x_as_string[0], name=None)
        # np.savetxt('test.out', tf.Session().run(t1), delimiter=',')
        # self.test_loss = tf.losses.mean_squared_error(test_label, self.test_vol)
        # self.test_loss = self.mean_error(self.test_vol, test_label)
        #test_label = tf.Print(test_label, [test_label[0, 225:275, 0:50].shape], message="\n\n\nTest sample")

        self.test_loss = self.mean_error(test_label, self.test_vol)
        # self.test_loss = tf.losses.mean_squared_error(test_label, self.test_vol)

        self.test_loss_summary = tf.summary.scalar("validation_loss", self.test_loss)
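# A minimal sketch of the normalization idea used above (an assumption about
# its intent, not part of the original class; _scaled_mse is a hypothetical
# helper): rescale both volumes to a common maximum before the MSE, so the
# comparison is insensitive to a global intensity-scale difference between
# the reconstruction and the label.
def _scaled_mse(label, vol):
    label_ = label * (254.0 / tf.reduce_max(label))
    vol_ = vol * (254.0 / tf.reduce_max(vol))
    return tf.losses.mean_squared_error(label_, vol_)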