    start = pre_index
    check_index = start + batch_size
    if check_index > data_size:  # Past the end of the data: start next epoch
        start = 0
    end = start + batch_size
    return start, end
### End define function ###

### Read file of train data and label
oriTrData, o.inDim = myio.read_data_file(trDataFile, o.inDim)
oriTrLabel_tmp = myio.read_label_file(trLabelFile)
oriTrLabel = myio.dense_to_one_hot(oriTrLabel_tmp, o.numClass)
oriTrData, oriTrLabel = trainShuff(oriTrData, oriTrLabel)  # shuffling

# Hold out the first valRate percent of the shuffled data for validation,
# use the rest for training
valInx = oriTrData.shape[0] / 100 * valRate
valData = oriTrData[0:valInx]
valLabel = oriTrLabel[0:valInx]
trData = trData = oriTrData[valInx:oriTrData.shape[0]]
trLabel = oriTrLabel[valInx:oriTrLabel.shape[0]]
totalBatch = trData.shape[0] / miniBatch

### Main script ###
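# The prediction code below looks up tensors from an already-restored graph;
# the restore step itself is not part of this section. The following is a
# minimal sketch, assuming a TF1-style checkpoint saved during training; the
# path 'dnn_model.ckpt' is a hypothetical placeholder, not a name taken from
# this script.
import tensorflow as tf

sess = tf.Session()
saver = tf.train.import_meta_graph('dnn_model.ckpt.meta')  # rebuild the saved graph definition
saver.restore(sess, 'dnn_model.ckpt')                      # load the trained weights into the session
graph = tf.get_default_graph()                             # handle used for get_tensor_by_name() below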
x = graph.get_tensor_by_name("x:0")
lab_y = graph.get_tensor_by_name("lab_y:0")
#mm_last = graph.get_tensor_by_name("mm_last:0")
#b_last = graph.get_tensor_by_name("b_last:0")
out_y = graph.get_tensor_by_name("out_y:0")
keepProb = graph.get_tensor_by_name("keepProb:0")
ce = graph.get_tensor_by_name("ce:0")
acc = graph.get_tensor_by_name("acc:0")

test_data, featdim = myio.read_data_file(data_file)

print 'LOG : predict probability using DNN-model'
with tf.device('/cpu:0'):
    pred_data = sess.run(out_y, feed_dict={x: test_data, keepProb: 1.0})

if in_lab:
    test_lab = myio.read_label_file(label_file)
    test_lab_ot = myio.dense_to_one_hot(test_lab, 2)
    pred_acc = sess.run(acc, feed_dict={out_y: pred_data, lab_y: test_lab_ot})
    pred_ce = sess.run(ce, feed_dict={out_y: pred_data, lab_y: test_lab_ot})
    print 'Results : '
    print ' # of data = %d' % (pred_data.shape[0])
    print ' average of cross entropy = %f' % (pred_ce)
    print ' accuracy = %2.1f%%' % (pred_acc * 100)
    print '### done\n'

if o.predprob != '':
    print 'LOG : write predicted probability -> %s' % (o.predprob)
    myio.write_predicted_prob(pred_data, o.predprob)

if o.predlab != '':
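# The myio module used throughout this section is not shown. As a hedged
# illustration only, the block below sketches what dense_to_one_hot(labels,
# num_classes) might look like given how it is called above; it is an
# assumption about the helper, not the actual myio implementation.
import numpy as np

def dense_to_one_hot(labels_dense, num_classes):
    # Convert integer class labels, e.g. [0, 2, 1], into one-hot rows.
    labels_dense = np.asarray(labels_dense, dtype=np.int64).ravel()
    one_hot = np.zeros((labels_dense.shape[0], num_classes), dtype=np.float32)
    one_hot[np.arange(labels_dense.shape[0]), labels_dense] = 1.0
    return one_hot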