    f['x_valid'] = x_valid
    f['y_valid'] = y_valid
    f.flush()
    f.close()
else:
    f = h5py.File(Configure.dataset_path, 'r')
    x_train = f['x_train'][:]
    y_train = f['y_train'][:]
    x_test = f['x_test'][:]
    y_test = f['y_test'][:]
    x_valid = f['x_valid'][:]
    y_valid = f['y_valid'][:]
    f.close()

print("train/valid/test split: {:d}/{:d}/{:d}".format(len(y_train), len(y_valid), len(y_test)))

print('---> create train/valid data wrapper')
train_data_wrapper = data_util.DataWrapper(x_train, y_train, istrain=True)
valid_data_wrapper = data_util.DataWrapper(x_valid, y_valid, istrain=True)
test_data_wrapper = data_util.DataWrapper(x_test, y_test, istrain=False)

print('---> build model')
# Build model and start training
# ==================================================
with tf.Graph().as_default(), tf.device('/gpu:2'):
    session_conf = tf.ConfigProto(
        allow_soft_placement=True,
        log_device_placement=False)
    session = tf.Session(config=session_conf)
    with session.as_default():
        text_rcnn = TextRCNN(
            label_size=FLAGS.label_size,
            sequence_length=max_document_length,
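A minimal, self-contained sketch of the cache-or-load pattern the script uses for its splits (the branch that first writes the HDF5 file is cut off above); the helper name and the dict-of-arrays interface are illustrative assumptions, not part of the original code:

import os
import h5py

def load_or_cache_splits(path, make_splits):
    """Illustrative helper: load train/valid/test arrays from an HDF5 cache,
    building the cache on first use. make_splits is a callable returning a
    dict such as {'x_train': ..., 'y_train': ..., 'x_valid': ..., ...}."""
    if not os.path.exists(path):
        splits = make_splits()
        with h5py.File(path, 'w') as f:
            for name, arr in splits.items():
                f[name] = arr  # each array becomes an HDF5 dataset
            f.flush()
    with h5py.File(path, 'r') as f:
        return {name: f[name][:] for name in f}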
print('---> load test text dataset')
# datasets = data_util.get_datasets_20newsgroup()
# x_text, y = data_util.load_data_labels(datasets)
x_text, y = data_util.load_text_datasets()

# Restore the vocabulary saved next to the checkpoint dir
vocabulary_path = os.path.join(FLAGS.checkpoint_dir, '..', 'vocabulary')
vocab_processor = learn.preprocessing.VocabularyProcessor.restore(filename=vocabulary_path)
# Map text to vocabulary indices
x_text = np.array(list(vocab_processor.transform(x_text)))

print('---> create test data wrapper')
test_data_wrapper = data_util.DataWrapper(x_text, istrain=False, is_shuffle=False)

# Evaluation
# ==================================================
checkpoint_file = tf.train.latest_checkpoint(FLAGS.checkpoint_dir)
print(checkpoint_file)

with tf.Graph().as_default(), tf.device('/gpu:2'):
    session_conf = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
    session = tf.Session(config=session_conf)
    with session.as_default():
        # Load the saved meta graph and restore variables
        saver = tf.train.import_meta_graph("{}.meta".format(checkpoint_file))
        saver.restore(session, checkpoint_file)
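        # A minimal sketch of the evaluation step that would follow, assuming the
        # restored TextRCNN graph exposes placeholders named "input_x" and
        # "dropout_keep_prob" and a prediction op named "output/predictions";
        # these names are assumptions, adjust them to whatever the model defines.
        graph = tf.get_default_graph()
        input_x = graph.get_operation_by_name("input_x").outputs[0]
        dropout_keep_prob = graph.get_operation_by_name("dropout_keep_prob").outputs[0]
        predictions = graph.get_operation_by_name("output/predictions").outputs[0]

        # Run inference batch by batch over the index-mapped test texts.
        batch_size = 64
        all_predictions = []
        for start in range(0, len(x_text), batch_size):
            x_batch = x_text[start:start + batch_size]
            batch_predictions = session.run(
                predictions,
                feed_dict={input_x: x_batch, dropout_keep_prob: 1.0})
            all_predictions.append(batch_predictions)
        all_predictions = np.concatenate(all_predictions)

        # Compare predicted class indices against the gold labels loaded above.
        y_true = np.asarray(y)
        if y_true.ndim > 1:  # one-hot labels -> class indices
            y_true = np.argmax(y_true, axis=1)
        print("test accuracy: {:.4f}".format(float(np.mean(all_predictions == y_true))))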