def __init__(self, train_proj, train_vol, test_proj, test_vol, sess):
    # Filename lists are fed once through placeholders and stored in
    # untracked variables, so the string lists are not baked into the
    # serialized graph definition.
    self.train_proj_fns_init = tf.placeholder(tf.string, shape=(len(train_proj)))
    self.train_vol_fns_init = tf.placeholder(tf.string, shape=(len(train_vol)))
    self.test_proj_fns_init = tf.placeholder(tf.string, shape=(len(test_proj)))
    self.test_vol_fns_init = tf.placeholder(tf.string, shape=(len(test_vol)))
    self.train_proj_fns = tf.Variable(self.train_proj_fns_init,
                                      trainable=False, collections=[])
    self.train_vol_fns = tf.Variable(self.train_vol_fns_init,
                                     trainable=False, collections=[])
    self.test_proj_fns = tf.Variable(self.test_proj_fns_init,
                                     trainable=False, collections=[])
    self.test_vol_fns = tf.Variable(self.test_vol_fns_init,
                                    trainable=False, collections=[])
    sess.run(self.train_proj_fns.initializer,
             feed_dict={self.train_proj_fns_init: train_proj})
    sess.run(self.train_vol_fns.initializer,
             feed_dict={self.train_vol_fns_init: train_vol})
    sess.run(self.test_proj_fns.initializer,
             feed_dict={self.test_proj_fns_init: test_proj})
    sess.run(self.test_vol_fns.initializer,
             feed_dict={self.test_vol_fns_init: test_vol})

    # Load the projection geometry and restrict it to the limited-angle range.
    geom, angles = projtable.read(DATA_P + 'projMat.txt')
    re = ct.Reconstructor(CONF_LA,
                          angles[0:LIMITED_ANGLE_SIZE],
                          trainable=True,
                          name='LAReconstructor',
                          weights_type=WEIGHTS_TYPE)
    geom_la = geom[0:LIMITED_ANGLE_SIZE]

    with tf.device("/cpu:0"):
        train, train_label, test, test_label = input_pipeline(
            self.train_proj_fns, self.train_vol_fns,
            self.test_proj_fns, self.test_vol_fns)
    self.test_label = test_label

    if not tf.train.get_global_step():
        tf.train.create_global_step()

    self.train_op = self.train_on_projections(train, train_label, re, geom_la)
    self.test_vol = re.apply(test, geom_la)

    with tf.device("/cpu:0"):
        self.test_loss = tf.losses.mean_squared_error(test_label, self.test_vol)
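# A minimal, self-contained sketch of the placeholder -> variable
# initialization pattern used above: the string list is fed exactly once
# through a placeholder, so the filenames never become graph constants.
# The names below (_init_filename_var, fns, filenames) are illustrative,
# not part of the project code.
def _init_filename_var(sess, filenames):
    fns_init = tf.placeholder(tf.string, shape=(len(filenames)))
    # collections=[] keeps the variable out of GLOBAL_VARIABLES, so a later
    # tf.global_variables_initializer() will not try to re-initialize it;
    # its initializer must therefore be run explicitly, as done here.
    fns = tf.Variable(fns_init, trainable=False, collections=[])
    sess.run(fns.initializer, feed_dict={fns_init: filenames})
    return fns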
def psi_self(expect_score, acture_score, length=10, method='step', return_detail=False):
    """Computes the Population Stability Index (PSI) between an expected
    score distribution and an actual one, using `length` bins."""
    import math
    labels = ['c' + str(i) for i in range(length)]
    # Bin the expected scores: equal-width bins ('step') or quantile bins.
    if method == 'step':
        expt_out, bins = pd.cut(expect_score, retbins=True, labels=labels,
                                bins=length)
    else:
        expt_out, bins = pd.qcut(expect_score, retbins=True, labels=labels,
                                 q=length)
    # Widen the outer bin edges so every actual score falls into some bin.
    bins[0] = min(acture_score) - 1
    bins[length] = max(acture_score) + 1
    acture_out = pd.cut(acture_score, bins=bins, labels=labels)
    re = pd.DataFrame({
        'expect': expt_out.value_counts(),
        'acture': acture_out.value_counts()
    })
    re['expect%'] = re['expect'] / re['expect'].sum()
    re['acture%'] = re['acture'] / re['acture'].sum()
    # Guard against empty bins, which would make the log term undefined.
    re['expect%'] = re['expect%'].map(lambda x: 0.0001 if x == 0 else x)
    re['acture%'] = re['acture%'].map(lambda x: 0.0001 if x == 0 else x)
    re['psi'] = re.apply(
        lambda x: (x['acture%'] - x['expect%']) *
                  math.log(x['acture%'] / x['expect%']),
        axis=1)
    psi = re.psi.sum()
    if return_detail:
        return re, psi
    return psi
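# Example usage of psi_self on synthetic data (the scores and the helper name
# below are illustrative): compare a development-time score distribution
# against a production sample. PSI below ~0.1 is conventionally read as
# "no significant shift"; that threshold is a convention, not part of this code.
def psi_self_example():
    import numpy as np
    rng = np.random.RandomState(0)
    expect_score = rng.normal(600, 50, 10000)  # scores at development time
    acture_score = rng.normal(615, 55, 2000)   # scores seen in production
    detail, psi = psi_self(expect_score, acture_score, length=10,
                           method='step', return_detail=True)
    print(detail)
    print('PSI = %.4f' % psi)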
def my_train():
    train_proj = [
        "/home/davi/Documentos/ConeDeepLearningCT2/phantoms/lowdose/binary0.proj.bin",
    ]
    train_label = [
        "/home/davi/Documentos/ConeDeepLearningCT2/phantoms/lowdose/binary0.vol.bin",
    ]
    save_path = '/home/davi/Documentos/train/model_%d/' % 0

    # Read the raw projection and volume files into memory.
    with tf.Session() as sess:
        train_list = []
        label_list = []
        for i in range(len(train_proj)):
            train_list.append(sess.run(dennerlein.read_noqueue(train_proj[i])))
            label_list.append(sess.run(dennerlein.read_noqueue(train_label[i])))
        geom, angles = projtable.read(DATA_P + 'projMat.txt')

    # OK, we now have the samples in train_list and the labels in label_list.
    BATCH_SIZE = 1
    EPOCHS = 100000
    features, labels = (train_list, label_list)
    dataset = tf.data.Dataset.from_tensor_slices(
        (np.asarray(features), np.asarray(labels))).repeat().batch(BATCH_SIZE)
    iterator = dataset.make_one_shot_iterator()
    x, y = iterator.get_next()

    with tf.Session(config=tf.ConfigProto(gpu_options=GPU_OPTIONS)) as sess:
        global LEARNING_RATE

        # Set things up to write out a slice before training starts.
        re = ct.Reconstructor(CONF_LA,
                              angles[0:LIMITED_ANGLE_SIZE],
                              DISPLACEMENT,
                              name='LAReconstructor',
                              weights_type=WEIGHTS_TYPE)
        volume_la = re.apply(x, geom)

        # This reconstructor is used to correct the image during the
        # training epochs.
        re = ct.Reconstructor(CONF_LA,
                              angles[0:LIMITED_ANGLE_SIZE],
                              DISPLACEMENT,
                              trainable=True,
                              name='LAReconstructor',
                              weights_type=WEIGHTS_TYPE)

        if not tf.train.get_global_step():
            tf.train.create_global_step()

        # Build the training graph ONCE, outside the loop. The original code
        # recreated the reconstruction, loss, PNG writer and optimizer on
        # every iteration, which grows the graph without bound.
        volume_corrected = re.apply(x, geom)
        # Add a leading batch dimension to match the label tensor.
        volume_corrected = tf.expand_dims(volume_corrected, axis=0)

        # Rescale both volumes to a common peak so the loss is comparable.
        t1 = tf.math.reduce_max(y)
        t2 = tf.math.reduce_max(volume_corrected)
        label_ = y * (255 / t1)
        volume_la_ = volume_corrected * (255 / t2)

        loss = tf.losses.mean_squared_error(label_, volume_la_)
        gstep = tf.train.get_global_step()
        train_step = tf.train.GradientDescentOptimizer(LEARNING_RATE).minimize(
            loss, colocate_gradients_with_ops=True, global_step=gstep)

        write_png_after = png.writeSlice(
            volume_la_[0][0],
            '/home/davi/Documentos/train/test_model_0/slice_qualquer_depois_treinamento.png'
        )

        sess.run(tf.global_variables_initializer())
        sess.run(tf.local_variables_initializer())
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        saver = tf.train.Saver(max_to_keep=TRACK_LOSS)

        # Write the pre-training slice to disk.
        write_png = png.writeSlice(
            volume_la[0],
            '/home/davi/Documentos/train/test_model_0/slice_qualquer_antes_treinamento.png'
        )
        sess.run(write_png)

        try:
            for i in range(EPOCHS):
                # if i % 30 == 0:
                #     print("Lowering the LEARNING RATE")
                #     LEARNING_RATE /= 10

                # Dump the current corrected slice for visual inspection.
                sess.run(write_png_after)

                step = sess.run(gstep)
                # NOTE: the original condition was `if step % 10:`, which
                # saved on every step EXCEPT multiples of 10; this saves
                # every 10th step, which is what was intended.
                if step % 10 == 0:
                    print("Saving the model")
                    saver.save(sess, save_path + 'model', global_step=step)

                print("Training")
                sess.run(train_step)
        except tf.errors.OutOfRangeError:
            print('Done.')
        finally:
            coord.request_stop()
            coord.join(threads)
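# A minimal, self-contained sketch of the input pattern my_train relies on:
# tf.data.Dataset.from_tensor_slices(...).repeat().batch(...) with a one-shot
# iterator (TF 1.x API). The dummy arrays stand in for the projections and
# volumes that dennerlein.read_noqueue loads in the real code.
def dataset_pattern_example():
    import numpy as np
    import tensorflow as tf
    features = np.zeros((4, 8, 8), dtype=np.float32)  # dummy projections
    labels = np.ones((4, 8, 8), dtype=np.float32)     # dummy volumes
    dataset = tf.data.Dataset.from_tensor_slices(
        (features, labels)).repeat().batch(1)
    iterator = dataset.make_one_shot_iterator()
    x, y = iterator.get_next()
    with tf.Session() as sess:
        bx, by = sess.run([x, y])  # each run yields the next batch
        print(bx.shape, by.shape)  # (1, 8, 8) (1, 8, 8)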
def __init__(self, train_proj, train_vol, test_proj, test_vol, sess):
    self.train_proj_fns_init = tf.placeholder(tf.string, shape=(len(train_proj)))
    self.train_vol_fns_init = tf.placeholder(tf.string, shape=(len(train_vol)))
    self.test_proj_fns_init = tf.placeholder(tf.string, shape=(len(test_proj)))
    self.test_vol_fns_init = tf.placeholder(tf.string, shape=(len(test_vol)))
    self.train_proj_fns = tf.Variable(self.train_proj_fns_init,
                                      trainable=False, collections=[])
    self.train_vol_fns = tf.Variable(self.train_vol_fns_init,
                                     trainable=False, collections=[])
    self.test_proj_fns = tf.Variable(self.test_proj_fns_init,
                                     trainable=False, collections=[])
    self.test_vol_fns = tf.Variable(self.test_vol_fns_init,
                                    trainable=False, collections=[])
    sess.run(self.train_proj_fns.initializer,
             feed_dict={self.train_proj_fns_init: train_proj})
    sess.run(self.train_vol_fns.initializer,
             feed_dict={self.train_vol_fns_init: train_vol})
    sess.run(self.test_proj_fns.initializer,
             feed_dict={self.test_proj_fns_init: test_proj})
    sess.run(self.test_vol_fns.initializer,
             feed_dict={self.test_vol_fns_init: test_vol})

    geom, angles = projtable.read(DATA_P + 'projMat.txt')
    self.geom = geom
    re = ct.Reconstructor(CONF_LA,
                          angles[0:LIMITED_ANGLE_SIZE],
                          DISPLACEMENT,
                          trainable=True,
                          name='LAReconstructor',
                          weights_type=WEIGHTS_TYPE)
    geom_la = geom[0:LIMITED_ANGLE_SIZE]

    with tf.device("/cpu:0"):
        train, train_label, test, test_label = input_pipeline(
            self.train_proj_fns, self.train_vol_fns,
            self.test_proj_fns, self.test_vol_fns)
    self.test = test
    self.test_label = test_label
    self.train_proj = train
    self.train_label = train_label
    self.re = re
    # Keep the projections as an attribute so they can be written out as PNG
    # to check that the data is in the expected format.
    self.train_data = train

    if not tf.train.get_global_step():
        tf.train.create_global_step()
    self.train_op = self.train_on_projections(train, train_label, re, geom_la)
    self.test_vol = re.apply(test, geom_la)

    with tf.device("/cpu:0"):
        # Normalize the values so the loss is not dominated by the raw scale.
        # (These normalized tensors are currently NOT used by the loss below.)
        t1 = tf.math.reduce_max(test_label)
        t2 = tf.math.reduce_max(self.test_vol)
        test_label_ = test_label * (254 / t1)
        test_vol_ = self.test_vol * (254 / t2)
        # Earlier debugging experiments were removed for readability: tf.Print
        # of min/max/mean of test_label/test_vol, dumping the label tensor to
        # disk via tf.io.write_file / np.savetxt, writing a slice with
        # png.writeSlice, and alternative losses such as
        #   self.test_loss = tf.losses.mean_squared_error(test_label, self.test_vol)
        self.test_loss = self.mean_error(test_label, self.test_vol)
        self.test_loss_summary = tf.summary.scalar("validation_loss",
                                                   self.test_loss)
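# Sketch of the max-normalization pattern used above before comparing two
# volumes: both tensors are rescaled so their maxima map to the same peak
# (254 here), making the comparison insensitive to absolute intensity scale.
# mean_error is project code; this stand-in uses plain MSE, and the function
# name normalized_mse is illustrative, not part of the project.
def normalized_mse(label, volume, peak=254.0):
    label_n = label * (peak / tf.math.reduce_max(label))
    volume_n = volume * (peak / tf.math.reduce_max(volume))
    return tf.losses.mean_squared_error(label_n, volume_n)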