tf.summary.scalar('loss', loss)
tf.summary.scalar('acc', acc)
merged = tf.summary.merge_all()


def read_dcm(names, raw=False):
    """Read a single DICOM file (raw=True) or a DICOM series directory into a NumPy array."""
    if raw:
        img = sitk.ReadImage(names)
    else:
        names = sitk.ImageSeriesReader_GetGDCMSeriesFileNames(names)
        img = sitk.ReadImage(names)
    return sitk.GetArrayFromImage(img)


data = Data(path, BLOCK_SIZE)

if __name__ == '__main__':
    with tf.Session(graph=g) as sess:
        saver = tf.train.Saver()
        tf.global_variables_initializer().run()
        key = 0.0045  # 0.005
        summary_writer = tf.summary.FileWriter('./summary', graph=sess.graph)
        w = [0.1, 0.2, 0.3, 0.4]
        ans3 = 1000
        # cv2.imwrite('./prediction/test_.jpg', np.uint8(
        #     (pic[0, :, :, 0] < pic[0, :, :, 1])) * 255)
        count = 0
        iteration = 0
        while iteration < 10000:
            try:
                try:
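# Usage sketch for read_dcm (assumes `import SimpleITK as sitk`; the paths
# below are hypothetical examples, not files from this repo):
#
#   volume = read_dcm('./dicom/patient_01')               # whole series directory
#   single = read_dcm('./dicom/slice_000.dcm', raw=True)  # single file
#   print(volume.shape)  # SimpleITK arrays come back as (depth, height, width)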
train = tf.train.MomentumOptimizer(
    learning_rate=rates, momentum=0.2).minimize(loss=loss, global_step=g_steps)

with tf.variable_scope('evaluate'):
    # Voxel-wise accuracy: predicted class = argmax over the last (class) axis.
    pre_img = tf.argmax(pre, -1)
    ans = tf.equal(pre_img, Y)
    acc = tf.reduce_mean(tf.cast(ans, tf.float32))
    # VD = Metrics.VD(pre, Y)
    # VOE = Metrics.VOE(pre, Y)

tf.summary.scalar('loss', loss)
tf.summary.scalar('acc', acc)
merged = tf.summary.merge_all()

data = Data(path, BLOCK_SIZE)

if __name__ == '__main__':
    with tf.Session(graph=g) as sess:
        saver = tf.train.Saver()
        # saver.restore(sess, './test_model_save_3/test.ckpt')
        tf.global_variables_initializer().run()
        key = 0.0045  # 0.005
        # sess.run(tf.assign(g_steps, 0))
        summary_writer = tf.summary.FileWriter('./summary', graph=sess.graph)
        w = [0.1, 0.2, 0.3, 0.4]
        ans3 = 1000
        # cv2.imwrite('./prediction/test_.jpg', np.uint8(
        #     (pic[0, :, :, 0] < pic[0, :, :, 1])) * 255)
        count = 0
        iteration = 0
        while iteration < 100000:
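# The 'evaluate' scope above is plain voxel-wise accuracy: argmax over the
# class axis, then mean agreement with the labels. A NumPy equivalent
# (sketch; `pre_np` and `y_np` stand in for fetched prediction/label arrays):
#
#   pred = pre_np.argmax(axis=-1)
#   acc = (pred == y_np).mean()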
steps = 1000
g_steps = tf.Variable(0)
rates = tf.train.exponential_decay(0.2, g_steps, 200, 0.95, staircase=True)
# train = tf.train.GradientDescentOptimizer(rates).minimize(loss, global_step=g_steps)
# Alternating updates: train1 minimizes the generator loss over all trainable
# variables; train2 minimizes the discriminator loss over d_vars only.
train1 = tf.train.MomentumOptimizer(
    learning_rate=rates, momentum=0.2).minimize(loss=loss_g, global_step=g_steps)
train2 = tf.train.MomentumOptimizer(
    learning_rate=rates, momentum=0.2).minimize(
    loss=loss_d, global_step=g_steps, var_list=d_vars)
tf.summary.scalar('loss_d', loss_d)
tf.summary.scalar('loss_g', loss_g)
merged = tf.summary.merge_all()

data = Data(path, BLOCK_SIZE, stride)

if __name__ == '__main__':
    with tf.Session(graph=g) as sess:
        saver = tf.train.Saver()
        saver.restore(sess, './test_model_save4000/test.ckpt')
        key = 0.0045  # 0.005
        summary_writer = tf.summary.FileWriter('./summary_1', graph=sess.graph)
        w = [1, 2, 3, 4]
        count = 0
        iteration = 0
        while iteration < 10000:
            # try:
            try:
                x, y = data.next()
            except Exception as e:
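# `var_list=d_vars` above keeps the discriminator step from touching the
# generator's weights. `d_vars` is typically gathered by scope (sketch; the
# scope name 'discriminator' is an assumption, not confirmed by this file):
#
#   d_vars = [v for v in tf.trainable_variables()
#             if v.name.startswith('discriminator')]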
g_steps = tf.Variable(0)
rates = tf.train.exponential_decay(0.2, g_steps, 200, 0.95, staircase=True)
# train = tf.train.GradientDescentOptimizer(rates).minimize(loss, global_step=g_steps)
train1 = tf.train.MomentumOptimizer(
    learning_rate=rates, momentum=0.2).minimize(loss=loss_g, global_step=g_steps)
train2 = tf.train.MomentumOptimizer(
    learning_rate=rates, momentum=0.2).minimize(
    loss=loss_d, global_step=g_steps, var_list=d_vars)
tf.summary.scalar('loss_d', loss_d)
tf.summary.scalar('loss_g', loss_g)
merged = tf.summary.merge_all()

data = Data(path, BLOCK_SIZE, stride)

if __name__ == '__main__':
    with tf.Session(graph=g) as sess:
        tf.global_variables_initializer().run()
        # Shape probe: feed one zero-filled block and print the shapes of two
        # intermediate tensors.
        x = np.zeros([1] + BLOCK_SIZE + [1])
        # graph = tf.get_default_graph()
        # tensor = graph.get_tensor_by_name('generator/block10/up/Relu:0')
        ans = sess.run([tf.shape(monitor[0]), tf.shape(supervise_stream[0])],
                       feed_dict={X: x})
        # print(main_stream[10].name)
        print(ans)
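# The same probe works for any tensor via the commented-out
# get_tensor_by_name pattern (sketch; the tensor name is the one already
# quoted above, and `g` is the graph this session was built on):
#
#   t = g.get_tensor_by_name('generator/block10/up/Relu:0')
#   print(sess.run(tf.shape(t), feed_dict={X: x}))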