    def __init__(self):
        config = Config()
        # Note: the original passed Settings(*('implicit')), which splats the string
        # into single characters; the full setting name is intended here.
        setting = Settings("implicit")
        config.update(setting.dict())
        config.set_format("ucca")
        self.feature_extractor = DenseFeatureExtractor(
            OrderedDict(),
            indexed=config.args.classifier != 'mlp',
            hierarchical=False,
            node_dropout=config.args.node_dropout,
            omit_features=config.args.omit_features)
        # Restore the pre-trained reward model from its TF1 checkpoint and look up
        # the input placeholder and the final dense-layer output by tensor name.
        self.sess = tf.Session()
        saver = tf.train.import_meta_graph(glob('env_r_model-*.meta')[0])
        saver.restore(self.sess, tf.train.latest_checkpoint('./'))
        graph = tf.get_default_graph()
        self.x = graph.get_tensor_by_name("Placeholder:0")
        self.y = graph.get_tensor_by_name("dense_2/BiasAdd:0")
        self.length = None
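
    # Illustrative sketch (not in the original class): how the restored tensors
    # would typically be queried. The method name `score` and the shape of
    # `features` are assumptions; the real call sites live elsewhere.
    def score(self, features):
        """Feed feature vectors into the restored graph and return the raw
        outputs of the final dense layer (presumably the reward estimates)."""
        return self.sess.run(self.y, feed_dict={self.x: features})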
    passage = load_passage(filename)
    sys.stdout.write('.')
    sys.stdout.flush()
    # store_sequence_to = "data/oracles/%s/%s.txt" % (cat, basename(filename))  # , setting.suffix()
    # with open(store_sequence_to, "w", encoding="utf-8") as f:
    #     for i, action in enumerate(gen_actions(passage, feature_extractor)):
    #         pass  # print(action, file=f)
    for _ in gen_actions(passage, feature_extractor):
        pass


if __name__ == "__main__":
    config = Config()
    # Pass the full setting name (the original splatted the string into characters).
    setting = Settings("implicit")
    config.update(setting.dict())
    config.set_format("ucca")
    feature_extractor = DenseFeatureExtractor(
        OrderedDict(),
        indexed=config.args.classifier != 'mlp',
        hierarchical=False,
        node_dropout=config.args.node_dropout,
        omit_features=config.args.omit_features)
    filenames = passage_files(sys.argv[1])
    for filename in filenames:
        # TODO: solve the problem of the process being "KILLED" while writing the file.
        # Use 100 files temporarily before solving this.
        produce_oracle(filename, feature_extractor)
    # Dump envTrainingData to a file for further learning in rewardNN.py.
    json_str = json.dumps(envTrainingData) + "\n"
    json_bytes = json_str.encode('utf-8')
    with gzip.GzipFile('env-{}.json'.format(sys.argv[1]), 'w') as fout:
        fout.write(json_bytes)
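

# Illustrative sketch (an assumption, not part of the original script): how
# rewardNN.py could read the dump back. It mirrors the gzip + single JSON
# document layout written in the main block above; the helper name and the
# placeholder path are hypothetical.
def load_env_training_data(path):
    """Load a gzipped JSON training-data dump such as 'env-<dir>.json'."""
    with gzip.open(path, 'rt', encoding='utf-8') as fin:
        return json.loads(fin.read())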