# test_resnet_nlcd.py -- evaluate a trained ResNet on image + NLCD inputs and
# report AUC, average precision, and mean cross-entropy (log-likelihood).
import datetime

import numpy as np
import tensorflow as tf
from sklearn.metrics import average_precision_score, roc_auc_score

import get_data
import resnet_test


def main(_):
    print('reading npy...')
    data = np.load('../data/1st.npy')
    # jpg_list = np.load('64bin.npy')
    jpg_list = np.load('../data/nlcd+vae+image64/input_images_64.npy')
    test_order = np.load('../data/test.npy')
    print('reading finished')

    sess = tf.Session()

    print('building network...')
    hg = resnet_test.resnet(is_training=False)
    global_step = tf.Variable(0, name='global_step', trainable=False)
    merged_summary = tf.summary.merge_all()
    summary_writer = tf.summary.FileWriter(FLAGS.summary_dir, sess.graph)

    saver = tf.train.Saver(max_to_keep=None)
    saver.restore(sess, FLAGS.checkpoint_path)
    print('restoring from ' + FLAGS.checkpoint_path)

    def test_step():
        print('Testing...')
        all_ce_loss = 0.0
        all_output = []
        all_label = []
        batch_size = 18
        for i in range(int(len(test_order) / batch_size)):
            batch = test_order[batch_size * i:batch_size * (i + 1)]
            # Scale pixels to roughly [0, 2); labels and NLCD covariates
            # come from the same rows of the data matrix.
            input_image = get_data.get_jpg_test(jpg_list, batch) / 128.0
            input_label = get_data.get_label(data, batch)
            input_nlcd = get_data.get_nlcd(data, batch)

            feed_dict = {
                hg.input_image: input_image,
                hg.input_label: input_label,
                hg.input_nlcd: input_nlcd,
                hg.keep_prob: 1.0,
            }
            ce_loss, output = sess.run([hg.ce_loss, hg.output], feed_dict)
            all_ce_loss += ce_loss
            for row in output:  # renamed from `i`, which shadowed the batch index
                all_output.append(row)
            for row in input_label:
                all_label.append(row)

        all_output = np.array(all_output)
        all_label = np.array(all_label)
        # Mean cross-entropy over batches serves as the log-likelihood estimate.
        loglike = all_ce_loss / int(len(test_order) / batch_size)

        np.save('output.npy', all_output)
        np.save('label.npy', all_label)

        auc = roc_auc_score(all_label, all_output)
        time_str = datetime.datetime.now().isoformat()
        print("{}: auc {:g}, log_likelihood {:g}".format(time_str, auc, loglike))

        # Flatten so all (sample, class) pairs are scored jointly.
        all_output = np.reshape(all_output, (-1))
        all_label = np.reshape(all_label, (-1))
        ap = average_precision_score(all_label, all_output)
        auc_2 = roc_auc_score(all_label, all_output)
        print('ap:' + str(ap))
        print('auc_2:' + str(auc_2))

    test_step()
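# A minimal entry-point sketch for the script above, assuming FLAGS is defined
# with tf.app.flags as is usual for TF 1.x scripts of this shape. The flag
# names match those the script reads; the default paths are placeholders,
# not the authors' actual paths.
tf.app.flags.DEFINE_string('checkpoint_path', './model.ckpt',
                           'Checkpoint to restore before testing.')
tf.app.flags.DEFINE_string('summary_dir', './summary',
                           'Directory for TensorBoard summaries.')
FLAGS = tf.app.flags.FLAGS

if __name__ == '__main__':
    tf.app.run()  # parses flags, then calls main(_)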
# test_resnet_32bin.py -- same evaluation as above, but on the 32-bin image
# encoding and without the NLCD covariate input.
import datetime

import numpy as np
import tensorflow as tf
from sklearn.metrics import average_precision_score, roc_auc_score

import get_data
import resnet_test


def main(_):
    print('reading npy...')
    data = np.load('../data/1st.npy')
    jpg_list = np.load('../data/32bin.npy')
    test_order = np.load('../data/test.npy')
    print('reading finished')

    sess = tf.Session()

    print('building network...')
    hg = resnet_test.resnet(is_training=False)
    global_step = tf.Variable(0, name='global_step', trainable=False)
    merged_summary = tf.summary.merge_all()
    summary_writer = tf.summary.FileWriter(FLAGS.summary_dir, sess.graph)

    saver = tf.train.Saver(max_to_keep=None)
    saver.restore(sess, FLAGS.checkpoint_path)
    print('restoring from ' + FLAGS.checkpoint_path)

    def test_step():
        print('Testing...')
        all_ce_loss = 0.0
        all_output = []
        all_label = []
        batch_size = 18
        for i in range(int(len(test_order) / batch_size)):
            batch = test_order[batch_size * i:batch_size * (i + 1)]
            input_image = get_data.get_jpg_test(jpg_list, batch)
            input_label = get_data.get_label(data, batch)

            feed_dict = {
                hg.input_image: input_image,
                hg.input_label: input_label,
                hg.keep_prob: 1.0,
            }
            ce_loss, output = sess.run([hg.ce_loss, hg.output], feed_dict)
            all_ce_loss += ce_loss
            for row in output:  # renamed from `i`, which shadowed the batch index
                all_output.append(row)
            for row in input_label:
                all_label.append(row)

        all_output = np.array(all_output)
        all_label = np.array(all_label)
        loglike = all_ce_loss / int(len(test_order) / batch_size)

        np.save('output.npy', all_output)
        np.save('label.npy', all_label)

        auc = roc_auc_score(all_label, all_output)
        time_str = datetime.datetime.now().isoformat()
        print("{}: auc {:g}, log_likelihood {:g}".format(time_str, auc, loglike))

        all_output = np.reshape(all_output, (-1))
        all_label = np.reshape(all_label, (-1))
        ap = average_precision_score(all_label, all_output)
        auc_2 = roc_auc_score(all_label, all_output)
        print('ap:' + str(ap))
        print('auc_2:' + str(auc_2))

    test_step()
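# A small follow-up sketch: once either test script has written output.npy and
# label.npy, per-class scores can be inspected offline. This assumes both
# arrays have shape [num_samples, num_classes], as saved above.
import numpy as np
from sklearn.metrics import roc_auc_score

outputs = np.load('output.npy')
labels = np.load('label.npy')
for c in range(labels.shape[1]):
    # AUC is undefined unless both positive and negative labels are present.
    if labels[:, c].min() != labels[:, c].max():
        print('class {:d} auc {:.4f}'.format(
            c, roc_auc_score(labels[:, c], outputs[:, c])))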
# train_ip102_resnet.py -- train a ResNet classifier on the IP102 pest dataset.
import os
import sys

import torch as t

o_path = os.getcwd()
sys.path.append(o_path)

from vgg_net import utils
from IP102.dataset_ip102 import Dataset_IP102, transform

# The original referenced an undefined `data_tf`; given the import above, it
# is assumed to be the dataset module's `transform`.
data_tf = transform

file_dir = 'F:/5.datasets/ip102_v1.1/'

train_dataset = Dataset_IP102(file_dir, train=True, transforms=data_tf)
train_data = t.utils.data.DataLoader(
    train_dataset,
    batch_size=64,  # 14
    shuffle=True,
    drop_last=True)

test_dataset = Dataset_IP102(file_dir, train=False, transforms=data_tf)
test_data = t.utils.data.DataLoader(
    test_dataset,
    batch_size=64,  # 14
    shuffle=True,
    drop_last=True)

# `resnet` is the project's model constructor (3 input channels, 102 classes);
# import it from wherever the model is defined in this repo.
net = resnet(3, 102)

# optimizer = t.optim.SGD(net.parameters(), lr=0.001)
# test adam
optimizer = t.optim.Adam(net.parameters(), lr=0.01)
criterion = t.nn.CrossEntropyLoss()

utils.train(net, train_data, test_data, 20, optimizer, criterion, 'resnet')
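# A minimal sketch of a preprocessing pipeline that could serve as `data_tf`
# above, assuming the project's `transform` follows standard ImageNet-style
# preprocessing (the actual definition lives in IP102.dataset_ip102; the
# sizes and statistics below are conventional defaults, not taken from it).
from torchvision import transforms

data_tf_sketch = transforms.Compose([
    transforms.Resize((224, 224)),   # ResNet's usual input size
    transforms.ToTensor(),           # HWC uint8 -> CHW float in [0, 1]
    transforms.Normalize(mean=[0.485, 0.456, 0.406],  # ImageNet statistics
                         std=[0.229, 0.224, 0.225]),
])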
# extract_features.py -- run every image through the ResNet and dump the
# penultimate-layer features to resnet50_feature.npy.
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.contrib.slim.nets import resnet_v2

import get_data
import resnet_test


def main(_):
    print('reading npy...')
    # trainlist, labels = read_csv.train_data()
    # jpg_list = np.load('../jpg_1st.npy')
    data = np.load('../1st.npy')
    jpg_list = [str(i) + '.jpg' for i in range(len(data))]
    # train_order = np.load('../train.npy')
    # test_order = np.load('../test.npy')
    test_order = list(range(len(data)))  # extract features for every sample

    sess = tf.Session()
    arg_scope = resnet_v2.resnet_arg_scope()
    print('building network...')
    with slim.arg_scope(arg_scope):
        hg = resnet_test.resnet(is_training=False)
    init_fn = _get_init_fn()
    merged_summary = tf.summary.merge_all()
    summary_writer = tf.summary.FileWriter(FLAGS.summary_dir, sess.graph)
    # initialize_all_variables() is deprecated; use the current name.
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver(max_to_keep=None)
    init_fn(sess)
    print('building finished')

    def test_step():
        print('testing...')
        all_feature = []
        batch_size = 17 * 3
        for i in range(int(len(test_order) / batch_size)):
            batch = test_order[batch_size * i:batch_size * (i + 1)]
            # Labels are not needed for feature extraction; only images are fed.
            input_image = get_data.get_jpg_test(jpg_list, batch)
            feed_dict = {hg.input_image: input_image}
            feature = sess.run(hg.feature, feed_dict)
            for row in feature:
                all_feature.append(row)
        np.save('resnet50_feature.npy', all_feature)

    test_step()
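# A minimal sketch of the _get_init_fn helper this script calls, assuming the
# usual TF-slim pattern of restoring pretrained weights from
# FLAGS.checkpoint_path (variable exclusions, if any, would be passed via the
# `exclude` argument of get_variables_to_restore).
def _get_init_fn():
    variables_to_restore = slim.get_variables_to_restore()
    return slim.assign_from_checkpoint_fn(FLAGS.checkpoint_path,
                                          variables_to_restore)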