import os

import tensorflow as tf

from dataset import AutoDLDataset  # AutoDLDataset is defined in `dataset.py`


def get_train_and_test_data(input_dir, dataset_name, repeat=False):
    """
    Returns:
        D_train, D_test: 2 AutoDLDataset objects (defined in `dataset.py`)
    """
    # Datasets are laid out as <input_dir>/<dataset_name>/<dataset_name>.data/{train,test}.
    train_path = os.path.join(input_dir, dataset_name, dataset_name + '.data', 'train')
    test_path = os.path.join(input_dir, dataset_name, dataset_name + '.data', 'test')
    D_train = AutoDLDataset(train_path)
    D_train.init(repeat=repeat)
    D_test = AutoDLDataset(test_path)
    D_test.init(repeat=repeat)
    return D_train, D_test
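# A minimal usage sketch (not part of the original module), assuming the
# directory layout implied by the path construction above. The input dir
# './sample_data' and dataset name 'tweet' are placeholder values.
def _example_get_train_and_test_data():
    D_train, D_test = get_train_and_test_data('./sample_data', 'tweet', repeat=False)
    # Each AutoDLDataset wraps a tf.data.Dataset reachable via get_dataset().
    print(D_train.get_dataset())
    print(D_test.get_dataset())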
def news_baseline_test():
    dataset = AutoDLDataset('./tweet.data/train/')
    dataset.init()
    # Build a one-shot iterator over the underlying tf.data.Dataset;
    # next_element is a (features, labels) pair of tensors.
    iterator = dataset.get_dataset().make_one_shot_iterator()
    next_element = iterator.get_next()

    # Materialize the first 10 examples, then print them all.
    data = []
    sess = tf.Session()
    for idx in range(10):
        print("Example " + str(idx))
        data.append(sess.run(next_element))
    for each_data in data:
        print(each_data)
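# Script entry point (an addition, not in the original file): run the baseline
# test directly when this module is executed as a script.
if __name__ == '__main__':
    news_baseline_test()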