import os
import time

import tensorflow as tf

# Config, Data, Data_Set, sAE, and new_SAE come from this repo's own
# modules (project-local imports omitted here).


def get_result_from_log():
    # ## data 1 ------------------------
    dataname = Config.paviaU
    class_num = Config.paviaU_class_num
    # ## data 1 ------------------------

    # ## data 2 ------------------------
    # dataname = Config.ksc
    # class_num = Config.ksc_class_num
    # ## data 2 ------------------------

    # log dir for this dataset
    log = Config.spectral_log + dataname + '/'

    with tf.Graph().as_default():
        pd = Data(data_name=dataname)
        data_sets = pd.get_train_valid_test_of_spectral_feature()

        # wrap the raw arrays in Data_Set objects
        train_data = Data_Set([data_sets[0], data_sets[1]])
        valid_data = Data_Set([data_sets[2], data_sets[3]])
        test_data = Data_Set([data_sets[4], data_sets[5]])

        images_placeholder, labels_placeholder = \
            placeholder_inputs(input_dim=data_sets[0].shape[1])

        # rebuild the same graph that was used for training so the
        # checkpoint variables can be matched by name
        logits = sAE.inference(images_placeholder, class_num, Config.encoder_layers)
        loss = sAE.loss(logits, labels_placeholder)
        train_op = sAE.training(loss, Config.learn_rate)
        eval_correct = sAE.evaluation(logits, labels_placeholder)

        model_file = tf.train.latest_checkpoint('./spectral_log/')
        saver = tf.train.Saver()
        with tf.Session() as sess:
            saver.restore(sess, model_file)
            # the restored model is fixed, so one evaluation pass is enough
            print('  Valid data evaluation')
            do_eval(sess, eval_correct, images_placeholder,
                    labels_placeholder, valid_data)
            print('  Test data evaluation')
            do_eval(sess, eval_correct, images_placeholder,
                    labels_placeholder, test_data)
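# placeholder_inputs is used by several routines in this file but defined
# elsewhere in the repo. A minimal sketch of what it needs to return,
# assuming dense float features and integer class labels (the actual
# implementation may differ):
def placeholder_inputs(input_dim):
    """Create feed placeholders for one batch of features and labels."""
    images_placeholder = tf.placeholder(tf.float32, shape=[None, input_dim])
    labels_placeholder = tf.placeholder(tf.int32, shape=[None])
    return images_placeholder, labels_placeholder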
def run_training():
    # ## data 1 ------------------------
    # dataname = Config.paviaU
    # class_num = Config.paviaU_class_num
    # ## data 1 ------------------------

    # ## data 2 ------------------------
    # dataname = Config.ksc
    # class_num = Config.ksc_class_num
    # ## data 2 ------------------------

    # ## data 3 ------------------------
    dataname = Config.Salinas
    # class_num = Config.Salinas_class_num
    # ## data 3 ------------------------

    # log / checkpoint directories
    spatial_log = '/' + dataname + '/' + 'spatial'
    final_ckpt_dir = Config.final_ckpt_dir + spatial_log
    if not os.path.exists(final_ckpt_dir):
        os.makedirs(final_ckpt_dir)
    pre_train_ckpt_dir = Config.pretrain_ckpt + spatial_log
    if not os.path.exists(pre_train_ckpt_dir):
        os.makedirs(pre_train_ckpt_dir)

    pre_need_train = True    # run layer-wise pre-training, else restore it
    final_need_train = True  # fine-tune the full model, else restore it

    with tf.Graph().as_default() as gad:
        pd = Data(data_name=dataname)
        data_sets = pd.get_train_valid_test_of_spatial_feature()

        train_data = Data_Set([data_sets[0], data_sets[1]])
        valid_data = Data_Set([data_sets[2], data_sets[3]])
        test_data = Data_Set([data_sets[4], data_sets[5]])

        with tf.Session(graph=gad) as sess:
            input_dim = [train_data.feature_dim, Config.batch_size]
            epoch_size = data_sets[0].shape[0]  # samples per epoch
            sae = new_SAE(input_dim, encoder_shape=Config.encoder_layers)

            # Savers must be created after the graph is built so they see
            # all variables.
            pre_train_saver = tf.train.Saver()
            final_saver = tf.train.Saver()
            writer = tf.summary.FileWriter(final_ckpt_dir, gad)

            # Initialization must also come after the graph is built.
            init = tf.global_variables_initializer()
            sess.run(init)

            # layer-wise pre-training, or restore a saved pre-trained model
            if pre_need_train:
                path = os.path.join(pre_train_ckpt_dir, 'SAE_graph')
                sae.pre_train(train_data, sess, path)
                pre_train_saver.save(
                    sess, os.path.join(pre_train_ckpt_dir, 'pre_train_model.ckpt'))
            else:
                ckpt = tf.train.get_checkpoint_state(pre_train_ckpt_dir)
                pre_train_saver.restore(sess, ckpt.model_checkpoint_path)

            # fine-tune the full model, or restore a saved final model
            if final_need_train:
                total_steps = int(
                    Config.epoch_final_train_times * epoch_size / Config.batch_size)
                for step in range(total_steps):
                    start_time = time.time()
                    feed_dict = fill_feed_dict(train_data, sae.x, sae.y)
                    cost, _ = sae.train_final_model(feed_dict, sess)
                    duration = time.time() - start_time

                    if step % 500 == 0:
                        print('final model train: Step %d: loss = %.2f (%.3f sec)'
                              % (step, cost, duration))
                        summary_ = sess.run(sae.final_merged, feed_dict=feed_dict)
                        writer.add_summary(summary_, step)
                        final_saver.save(
                            sess, os.path.join(final_ckpt_dir, 'final_model.ckpt'))
                        print('  Train data evaluation')
                        do_eval(sess, sae.correct, sae.x, sae.y, train_data)

                    if (step + 1) % 500 == 0 or (step + 1) == total_steps:
                        print('  Valid data evaluation')
                        do_eval(sess, sae.correct, sae.x, sae.y, valid_data)
                        print('  Test data evaluation')
                        do_eval(sess, sae.correct, sae.x, sae.y, test_data)
            else:
                ckpt = tf.train.get_checkpoint_state(final_ckpt_dir)
                final_saver.restore(sess, ckpt.model_checkpoint_path)
                print('  Train data evaluation')
                do_eval(sess, sae.correct, sae.x, sae.y, train_data)
                print('  Valid data evaluation')
                do_eval(sess, sae.correct, sae.x, sae.y, valid_data)
                print('  Test data evaluation')
                do_eval(sess, sae.correct, sae.x, sae.y, test_data)
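# fill_feed_dict is used by every training loop here but defined elsewhere.
# A minimal sketch, assuming Data_Set exposes a next_batch() method
# returning (features, labels) for Config.batch_size samples (hypothetical
# method name; adjust to the real Data_Set API):
def fill_feed_dict(data_set, images_pl, labels_pl):
    """Map the next training batch onto the graph's input placeholders."""
    images_feed, labels_feed = data_set.next_batch(Config.batch_size)
    return {images_pl: images_feed, labels_pl: labels_feed}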
# ## data 1 ------------------------
dataname = Config.paviaU
class_num = Config.paviaU_class_num
# ## data 1 ------------------------

# set log dir
log = './test/'

with tf.Graph().as_default() as gr:
    pd = Data(dataname)
    data_sets = pd.get_train_valid_test_of_spectral_feature()

    # init data
    train_data = Data_Set([data_sets[0], data_sets[1]])
    valid_data = Data_Set([data_sets[2], data_sets[3]])
    test_data = Data_Set([data_sets[4], data_sets[5]])

    with tf.Session(graph=gr) as sess:
        # a [None, n_features] placeholder so the graph accepts real data
        # batches of any size
        img_placeholder = tf.placeholder(
            tf.float32, [None, data_sets[0].shape[1]])
        sae = new_SAE(img_placeholder, encoder_shape=[60, 60, 60, 60],
                      sess=sess)

        # Saver and initializer must be created after the graph is built.
        saver = tf.train.Saver()
        init = tf.global_variables_initializer()
        sess.run(init)
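# do_eval is called by every routine in this file but defined elsewhere.
# A minimal sketch in the style of the TensorFlow 1.x tutorials, assuming
# eval_correct counts correct predictions per batch and Data_Set exposes a
# num_examples attribute (hypothetical name):
def do_eval(sess, eval_correct, images_pl, labels_pl, data_set):
    """Run one full-epoch evaluation pass and print the precision."""
    true_count = 0
    steps_per_epoch = data_set.num_examples // Config.batch_size
    num_examples = steps_per_epoch * Config.batch_size
    for _ in range(steps_per_epoch):
        feed_dict = fill_feed_dict(data_set, images_pl, labels_pl)
        true_count += sess.run(eval_correct, feed_dict=feed_dict)
    precision = float(true_count) / num_examples
    print('  Num examples: %d  Num correct: %d  Precision @ 1: %.4f'
          % (num_examples, true_count, precision))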
def run_training():
    # ## data 1 ------------------------
    # dataname = Config.paviaU
    # class_num = Config.paviaU_class_num
    # ## data 1 ------------------------

    # ## data 2 ------------------------
    dataname = Config.ksc
    class_num = Config.ksc_class_num
    # ## data 2 ------------------------

    # set log dir
    log = Config.mix_feature_log + dataname + '/'

    with tf.Graph().as_default():
        pd = Data(dataname)
        data_sets = pd.get_train_valid_test_of_mix_feature()

        train_data = Data_Set([data_sets[0], data_sets[1]])
        valid_data = Data_Set([data_sets[2], data_sets[3]])
        test_data = Data_Set([data_sets[4], data_sets[5]])

        images_placeholder, labels_placeholder = \
            placeholder_inputs(input_dim=data_sets[0].shape[1])

        logits = sAE.inference(images_placeholder, class_num, Config.encoder_layers)
        loss = sAE.loss(logits, labels_placeholder)
        train_op = sAE.training(loss, Config.learn_rate)
        eval_correct = sAE.evaluation(logits, labels_placeholder)

        summary = tf.summary.merge_all()
        init = tf.global_variables_initializer()
        saver = tf.train.Saver()

        sess = tf.Session()
        summary_writer = tf.summary.FileWriter(log, sess.graph)
        sess.run(init)

        for step in range(Config.max_steps):
            start_time = time.time()
            feed_dict = fill_feed_dict(train_data, images_placeholder,
                                       labels_placeholder)
            # train_op evaluates to None, so only the loss value is kept
            _, loss_value = sess.run([train_op, loss], feed_dict=feed_dict)
            duration = time.time() - start_time

            if step % 100 == 0:
                print('Step %d: loss = %.2f (%.3f sec)'
                      % (step, loss_value, duration))
                summary_str = sess.run(summary, feed_dict=feed_dict)
                summary_writer.add_summary(summary_str, step)
                summary_writer.flush()

            if (step + 1) % 100 == 0 or (step + 1) == Config.max_steps:
                checkpoint_file = os.path.join(log, 'valid_model.ckpt')
                saver.save(sess, checkpoint_file, global_step=step)
                print('  Valid data evaluation')
                do_eval(sess, eval_correct, images_placeholder,
                        labels_placeholder, valid_data)

                checkpoint_file = os.path.join(log, 'test_model.ckpt')
                saver.save(sess, checkpoint_file, global_step=step)
                print('  Test data evaluation')
                do_eval(sess, eval_correct, images_placeholder,
                        labels_placeholder, test_data)
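# For reference, the eval_correct op built by sAE.evaluation above is
# typically a count of predictions whose top-1 class matches the label.
# A sketch in the TensorFlow 1.x tutorial style (the repo's actual
# sAE.evaluation may differ):
def evaluation_sketch(logits, labels):
    """Return an op counting samples whose true class ranks first."""
    correct = tf.nn.in_top_k(logits, labels, 1)
    return tf.reduce_sum(tf.cast(correct, tf.int32))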
def get_results_from_exist_model():
    # ## data 1 ------------------------
    dataname = Config.paviaU
    class_num = Config.paviaU_class_num
    # ## data 1 ------------------------

    # ## data 2 ------------------------
    # dataname = Config.ksc
    # class_num = Config.ksc_class_num
    # ## data 2 ------------------------

    # set log dir
    log = './' + dataname + '/' + Config.spatial_log

    with tf.Graph().as_default():
        pd = Data(data_name=dataname)
        data_sets = pd.get_train_valid_test_of_spatial_feature()

        train_data = Data_Set([data_sets[0], data_sets[1]])
        valid_data = Data_Set([data_sets[2], data_sets[3]])
        test_data = Data_Set([data_sets[4], data_sets[5]])

        images_placeholder, labels_placeholder = \
            placeholder_inputs(input_dim=data_sets[0].shape[1])

        # rebuild the training graph so the checkpoint variables match
        logits = sAE.inference(images_placeholder, class_num, Config.encoder_layers)
        loss = sAE.loss(logits, labels_placeholder)
        train_op = sAE.training(loss, Config.learn_rate)
        eval_correct = sAE.evaluation(logits, labels_placeholder)

        saver = tf.train.Saver()
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            # e.g. log + '/test/test_model.ckpt-99'
            model_file = tf.train.latest_checkpoint(log + '/test')
            saver.restore(sess, model_file)
            print('model restored from %s' % model_file)

            # continue training from the restored weights
            for step in range(Config.max_steps):
                feed_dict = fill_feed_dict(train_data, images_placeholder,
                                           labels_placeholder)
                _, loss_value = sess.run([train_op, loss], feed_dict=feed_dict)

                if (step + 1) % 100 == 0 or (step + 1) == Config.max_steps:
                    print('  Valid data evaluation')
                    do_eval(sess, eval_correct, images_placeholder,
                            labels_placeholder, valid_data)
                    print('  Test data evaluation')
                    do_eval(sess, eval_correct, images_placeholder,
                            labels_placeholder, test_data)
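# A typical entry point, assuming this module is run as a script: train
# from scratch with run_training(), or restore and evaluate an existing
# checkpoint with get_results_from_exist_model().
if __name__ == '__main__':
    run_training()
    # get_results_from_exist_model()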