Example #1
def __init__(self, hp):
    self.hp = hp
    # Build the vocabulary lookup tables from the vocab file named in the hyperparameters.
    self.token2idx, self.idx2token = _load_vocab(hp.vocab)
    self.vocab_len = len(self.token2idx)
    # Token embedding matrix; zero_pad=True keeps the padding row (index 0) at zero.
    self.embeddings = get_token_embeddings(self.vocab_len,
                                           self.hp.d_model,
                                           zero_pad=True)
Example #2

def __init__(self, model_dir, vocab_file):
    """
    :param model_dir: path to the trained model directory
    :param vocab_file: path to the vocabulary file
    """
    # import_tf is a project helper assumed to select GPU 0 before importing TensorFlow.
    self.tf = import_tf(0)

    self.model_dir = model_dir
    self.vocab_file = vocab_file
    self.token2idx, self.idx2token = _load_vocab(vocab_file)

    # Rebuild the hyperparameters the model was trained with.
    hparams = Hparams()
    parser = hparams.parser
    self.hp = parser.parse_args()

    self.model = Transformer(self.hp)

    self._add_placeholder()
    self._init_graph()
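`import_tf` is project-specific rather than part of TensorFlow; a plausible sketch, assuming it restricts TensorFlow to a single GPU via CUDA_VISIBLE_DEVICES (the name and behavior are assumptions):

def import_tf(device_id):
    # Assumption: expose only the requested GPU (or none when device_id < 0),
    # then import TensorFlow so it sees the restricted device list.
    import os
    os.environ["CUDA_VISIBLE_DEVICES"] = str(device_id) if device_id >= 0 else ""
    import tensorflow as tf
    return tf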
Example #3
# xs and ys come from a tf.data iterator of the correct shape and type;
# `iter` is assumed to be created earlier in the script, e.g. with
# tf.data.Iterator.from_structure(train_batches.output_types,
#                                 train_batches.output_shapes).
xs, ys = iter.get_next()

logging.info('# init data')
# One-shot iterator for training; the eval iterator can be re-initialized per evaluation.
training_iter = train_batches.make_one_shot_iterator()
val_iter = eval_batches.make_initializable_iterator()

logging.info("# Load model")
m = Transformer(hp)

# Build the training and evaluation ops.
loss, train_op, global_step, train_summaries = m.train(xs, ys)
y_hat, eval_summaries = m.eval(xs, ys)

token2idx, idx2token = _load_vocab(hp.vocab)

# Indices 2 and 3 are assumed to be the <s> (start) and </s> (end) tokens
# in this vocabulary's ordering.
bs = BeamSearch(m, hp.beam_size,
                list(idx2token.keys())[2],
                list(idx2token.keys())[3], idx2token, hp.maxlen2, m.x,
                m.decoder_inputs, m.logits)

logging.info("# Session")
saver = tf.train.Saver(max_to_keep=hp.num_epochs)
with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
    ckpt = tf.train.latest_checkpoint(hp.logdir)
    if ckpt is None:
        logging.info("Initializing from scratch")
        sess.run(tf.global_variables_initializer())
        save_variable_specs(os.path.join(hp.logdir, "specs"))
    else:
        # Assumed completion: restore weights from the latest checkpoint.
        saver.restore(sess, ckpt)
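A minimal sketch of the training loop that typically follows this setup; num_train_batches and the per-epoch saving scheme are assumptions, not part of the original example:

    # Sketch under assumptions: num_train_batches is presumed to come from the
    # data-loading code that produced train_batches.
    total_steps = hp.num_epochs * num_train_batches
    gs = sess.run(global_step)
    while gs < total_steps:
        _, gs = sess.run([train_op, global_step])
        if gs % num_train_batches == 0:
            # Checkpoint once per epoch.
            saver.save(sess, os.path.join(hp.logdir, "model"), global_step=gs)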