def main(_):
    """Evaluate the image+nlcd VAE on the test split.

    Restores a checkpoint, runs `num` stochastic forward passes over the whole
    test set, averages the sigmoid outputs across passes, and reports
    AUC / AP / mean log-likelihood plus the mean log of `tang_reduce`.
    Saves the averaged outputs and labels to .npy files as a side effect.
    """
    print('reading npy...')
    data = np.load('../1st.npy')
    test_order = np.load('../test.npy')
    jpg_list = np.load('64bin.npy')
    print('reading finished')

    sess = tf.Session()
    print('building network...')
    hg = vae_margin_test.vae(is_training=False)
    # NOTE: global_step must be created before the Saver so the checkpoint
    # restores it; the summary objects are kept for graph-side effects even
    # though nothing is written to them here.
    global_step = tf.Variable(0, name='global_step', trainable=False)
    merged_summary = tf.summary.merge_all()
    summary_writer = tf.summary.FileWriter(FLAGS.summary_dir, sess.graph)
    saver = tf.train.Saver(max_to_keep=None)
    saver.restore(sess, FLAGS.checkpoint_path)
    print('restoring from ' + FLAGS.checkpoint_path)

    def test_step():
        """Run one full pass over the test set.

        Returns:
            (outputs, labels): two float arrays reshaped to (-1, 100).
        """
        print('Testing...')
        all_output = []
        all_label = []
        batch_size = FLAGS.batch_size
        for batch_idx in range(int(len(test_order) / batch_size)):
            order_slice = test_order[batch_size * batch_idx:batch_size * (batch_idx + 1)]
            input_nlcd = get_data.get_nlcd(data, order_slice)
            input_image = get_data.get_jpg_test(jpg_list, order_slice)
            input_label = get_data.get_label(data, order_slice)
            feed_dict = {
                hg.input_nlcd: input_nlcd,
                hg.input_image: input_image,
                hg.input_label: input_label,
                hg.keep_prob: 1.0,  # no dropout at eval time
            }
            output = sess.run(hg.output, feed_dict)
            all_output.extend(output)
            all_label.extend(input_label)
        all_output = np.reshape(np.array(all_output), (-1, 100))
        all_label = np.reshape(np.array(all_label), (-1, 100))
        return all_output, all_label

    all_output = 0.0   # running sum of per-pass outputs (numpy broadcasts over 0.0)
    all_label = 0.0    # labels from the latest pass (identical every pass)
    all_sigma = 0.0    # running sum of tang_reduce(output, label)
    num = 10000.0      # number of stochastic passes to average over
    for pass_idx in range(int(num)):
        print(pass_idx)
        output, label = test_step()
        all_output += output
        all_label = label
        all_sigma += tang_reduce(output, label)

    mean_output = all_output / num
    mean_sigma = all_sigma / num
    # NOTE(review): epsilon == 0 yields -inf in the log for saturated outputs;
    # kept as-is to preserve the original numbers.
    epsilon = 0
    loss = all_label * np.log(mean_output + epsilon) + (1 - all_label) * np.log(1 - mean_output + epsilon)
    loss = np.mean(np.sum(loss, axis=1))
    mean_log_sigma = np.mean(np.log(mean_sigma))
    # AUC on the *summed* outputs: rank-based, so scale does not matter.
    auc = roc_auc_score(all_label, all_output)
    mean_output = np.reshape(mean_output, (-1))
    all_label = np.reshape(all_label, (-1))
    np.save('all_output_1wavg.npy', mean_output)
    np.save('all_label.npy', all_label)
    ap = average_precision_score(all_label, mean_output)
    time_str = datetime.datetime.now().isoformat()
    new_auc = roc_auc_score(all_label, mean_output)
    print('margin results')
    tempstr = "{}: auc {:g}, ap {:g}, recon_loss {:g}, new_auc {:g}".format(time_str, auc, ap, loss, new_auc)
    print(tempstr)
    print('average log sigma:' + str(mean_log_sigma))
def main(_):
    """Evaluate the nlcd-only VAE on the test split.

    NOTE(review): this is a second top-level definition of ``main`` — it
    silently shadows the earlier image+nlcd variant in this file; only this
    one is reachable. Consider renaming one of them.

    Restores a checkpoint, runs `num` stochastic forward passes over the whole
    test set, averages the sigmoid outputs across passes, and reports
    AUC / AP / mean log-likelihood plus the mean log of `tang_reduce`.
    Saves the averaged outputs and labels to .npy files as a side effect.
    """
    print('reading npy...')
    data = np.load('../data/1st.npy')
    test_order = np.load('../data/test.npy')
    print('reading finished')

    sess = tf.Session()
    print('building network...')
    hg = vae_margin_test.vae(is_training=False)
    # NOTE: global_step must be created before the Saver so the checkpoint
    # restores it; the summary objects are kept for graph-side effects even
    # though nothing is written to them here.
    global_step = tf.Variable(0, name='global_step', trainable=False)
    merged_summary = tf.summary.merge_all()
    summary_writer = tf.summary.FileWriter(FLAGS.summary_dir, sess.graph)
    saver = tf.train.Saver(max_to_keep=None)
    saver.restore(sess, FLAGS.checkpoint_path)
    print('restoring from ' + FLAGS.checkpoint_path)

    def test_step():
        """Run one full pass over the test set.

        Returns:
            (outputs, labels): two float arrays reshaped to (-1, 100).
        """
        print('Testing...')
        all_output = []
        all_label = []
        batch_size = 283 * 6 * 3  # hard-coded: whole test set in one batch — TODO confirm
        for batch_idx in range(int(len(test_order) / batch_size)):
            order_slice = test_order[batch_size * batch_idx:batch_size * (batch_idx + 1)]
            input_nlcd = get_data.get_nlcd(data, order_slice)
            input_label = get_data.get_label(data, order_slice)
            feed_dict = {
                hg.input_nlcd: input_nlcd,
                hg.input_label: input_label,
                hg.keep_prob: 1.0,  # no dropout at eval time
            }
            output = sess.run(hg.output, feed_dict)
            all_output.extend(output)
            all_label.extend(input_label)
        all_output = np.reshape(np.array(all_output), (-1, 100))
        all_label = np.reshape(np.array(all_label), (-1, 100))
        return all_output, all_label

    all_output = 0.0   # running sum of per-pass outputs (numpy broadcasts over 0.0)
    all_label = 0.0    # labels from the latest pass (identical every pass)
    all_sigma = 0.0    # running sum of tang_reduce(output, label)
    num = 10000.0      # number of stochastic passes to average over
    for pass_idx in range(int(num)):
        print(pass_idx)
        output, label = test_step()
        all_output += output
        all_label = label
        all_sigma += tang_reduce(output, label)

    mean_output = all_output / num
    mean_sigma = all_sigma / num
    # NOTE(review): epsilon == 0 yields -inf in the log for saturated outputs;
    # kept as-is to preserve the original numbers.
    epsilon = 0
    loss = all_label * np.log(mean_output + epsilon) + (1 - all_label) * np.log(1 - mean_output + epsilon)
    loss = np.mean(np.sum(loss, axis=1))
    mean_log_sigma = np.mean(np.log(mean_sigma))
    # AUC on the *summed* outputs: rank-based, so scale does not matter.
    auc = roc_auc_score(all_label, all_output)
    mean_output = np.reshape(mean_output, (-1))
    all_label = np.reshape(all_label, (-1))
    np.save('all_output_1wavg.npy', mean_output)
    np.save('all_label.npy', all_label)
    ap = average_precision_score(all_label, mean_output)
    time_str = datetime.datetime.now().isoformat()
    new_auc = roc_auc_score(all_label, mean_output)
    print('margin results')
    tempstr = "{}: auc {:g}, ap {:g}, recon_loss {:g}, new_auc {:g}".format(time_str, auc, ap, loss, new_auc)
    print(tempstr)
    print('average log sigma:' + str(mean_log_sigma))