Example 1
import tensorflow as tf

import nmt
from data_iterator import TextIterator  # Nematus data iterator (assumed module path)


def score_model(source_file, target_file, scorer_settings, options):
    scores = []
    for option in options:
        # Give each model its own graph so variables from different
        # checkpoints cannot collide.
        g = tf.Graph()
        with g.as_default():
            with tf.Session() as sess:
                model, saver = nmt.create_model(option, sess)

                text_iterator = TextIterator(
                    source=source_file.name,
                    target=target_file.name,
                    source_dicts=option.source_dicts,
                    target_dict=option.target_dict,
                    batch_size=scorer_settings.b,
                    maxlen=float('inf'),
                    source_vocab_sizes=option.source_vocab_sizes,
                    target_vocab_size=option.target_vocab_size,
                    use_factor=(option.factors > 1),
                    sort_by_length=False)

                losses = nmt.calc_loss_per_sentence(
                    option,
                    sess,
                    text_iterator,
                    model,
                    normalization_alpha=scorer_settings.normalization_alpha)

                scores.append(losses)
    return scores
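A minimal usage sketch (the paths, `scorer_settings`, and `options` values below are hypothetical; in Nematus they would come from the command line and from saved model configs):

with open('test.src') as source_file, open('test.trg') as target_file:
    # one list of per-sentence losses for each model in `options`
    all_scores = score_model(source_file, target_file, scorer_settings, options)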
Example 2
import tensorflow as tf

from nmt import create_model, validate  # both assumed to live in Nematus' nmt module


def score_model(source_file, target_file, scorer_settings, options):
    scores = []
    for option in options:
        # Unlike Example 1, every model is built in the default graph, so
        # create_model must scope each model's variables uniquely.
        with tf.Session() as sess:
            model, saver = create_model(option, sess)

            valid_text_iterator = TextIterator(
                source=source_file.name,
                target=target_file.name,
                source_dicts=option.source_dicts,
                target_dict=option.target_dict,
                batch_size=scorer_settings.b,
                maxlen=float('inf'),
                source_vocab_sizes=option.source_vocab_sizes,
                target_vocab_size=option.target_vocab_size,
                use_factor=(option.factors > 1),
                sort_by_length=False)

            score = validate(
                option,
                sess,
                valid_text_iterator,
                model,
                normalization_alpha=scorer_settings.normalization_alpha)
            scores.append(score)

    return scores
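The design difference from Example 1 is worth noting: here a single default graph is reused across all models, and scoring is delegated to a validate helper that produces one score per model. The per-graph version in Example 1 is the safer pattern when several checkpoints define identically named variables.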
Example 3
import numpy as np
import tensorflow as tf

import nmt


def theano_to_tensorflow_model(in_path, out_path):
    # theano_to_tensorflow_config and construct_parameter_map are assumed to
    # be defined elsewhere in the same module.
    saved_model = np.load(in_path)
    config = theano_to_tensorflow_config(in_path)
    th2tf = construct_parameter_map(config)

    with tf.Session() as sess:
        model, saver = nmt.create_model(config, sess)
        seen = set()
        assign_ops = []
        for key in saved_model.keys():
            if key in th2tf:
                tf_name = th2tf[key]
                if tf_name is not None:
                    assert tf_name not in seen
                    seen.add(tf_name)
                    tf_var = tf.get_default_graph().get_tensor_by_name(tf_name)
                    if (sess.run(tf.shape(tf_var)) !=
                            saved_model[key].shape).any():
                        print("mismatch for", tf_name, key,
                              saved_model[key].shape,
                              sess.run(tf.shape(tf_var)))
                    assign_ops.append(tf.assign(tf_var, saved_model[key]))
            else:
                print "Not saving", key, "because no TF equivalent"
        sess.run(assign_ops)
        saver.save(sess, save_path=out_path)

        print "The following TF variables were not assigned (excluding Adam vars):"
        print "You should see only 'beta1_power', 'beta2_power' and 'time' variable listed"
        for tf_var in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):
            if tf_var.name not in seen and 'Adam' not in tf_var.name:
                print tf_var.name
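Converting a checkpoint is then a single call (paths are hypothetical; the input is a Theano/Nematus .npz archive, which is why np.load exposes .keys()):

theano_to_tensorflow_model('model.npz', 'model-tf/model')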
Example 4
import numpy as np
import tensorflow as tf

from nmt import create_model


def theano_to_tensorflow_model(in_path, out_path):
    saved_model = np.load(in_path)

    # Create fake config
    src_size, first_src_emb_size = saved_model['Wemb'].shape
    dim_per_factor = [first_src_emb_size]
    # `th2tf` maps Theano parameter names to TF variable names. Its full
    # construction is elided in this excerpt (cf. construct_parameter_map in
    # Example 3); a placeholder is used here so the snippet is self-contained.
    th2tf = {}
    # check for additional source embedding layers (one per factor)
    i = 1
    while True:
        th_name = 'Wemb{0}'.format(i)
        if th_name not in saved_model:
            break
        dim_per_factor.append(saved_model[th_name].shape[1])
        th2tf[th_name] = 'encoder/embedding/embeddings_{0}:0'.format(i)
        i += 1
    src_emb_size = sum(dim_per_factor)
    trg_size, trg_emb_size = saved_model['Wemb_dec'].shape
    state_size = saved_model['decoder_U'].shape[0]
    if 'encoder_Wx_lns' in saved_model:
        layer_norm = True
    else:
        layer_norm = False
    if 'ff_logit_W' in saved_model:
        tie_decoder_embeddings = False
    else:
        tie_decoder_embeddings = True
    assert trg_emb_size == src_emb_size, \
        'src_emb_size ({}) and trg_emb_size ({}) differ; this is unsupported in TF'.format(
            src_emb_size, trg_emb_size)
    # FakeConfig is assumed to be a small config holder defined elsewhere in
    # this module.
    fake_config = FakeConfig(state_size, dim_per_factor, src_size, trg_size,
                             layer_norm, tie_decoder_embeddings)

    with tf.Session() as sess:
        model, saver = create_model(fake_config, sess)
        seen = set()
        assign_ops = []
        for key in saved_model.keys():
            tf_name = th2tf.get(key)  # None means no TF equivalent
            if tf_name is not None:
                assert tf_name not in seen
                seen.add(tf_name)
                tf_var = tf.get_default_graph().get_tensor_by_name(tf_name)
                if (sess.run(tf.shape(tf_var)) !=
                        saved_model[key].shape).any():
                    print("mismatch for", tf_name, key,
                          saved_model[key].shape,
                          sess.run(tf.shape(tf_var)))
                assign_ops.append(tf.assign(tf_var, saved_model[key]))
            else:
                print "Not saving", key, "because no TF equivalent"
        sess.run(assign_ops)
        saver.save(sess, save_path=out_path)

        print "The following TF variables were not assigned (excluding Adam vars):"
        print "You should see only 'beta1_power', 'beta2_power' and 'time' variable listed"
        for tf_var in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):
            if tf_var.name not in seen and 'Adam' not in tf_var.name:
                print tf_var.name
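The interesting design choice in this variant is that no original config file is needed: the fake config is inferred entirely from the array shapes stored in the checkpoint (the embedding matrices give the vocabulary and embedding sizes, decoder_U gives the state size, and the presence of the layer-norm and logit weights toggles the corresponding flags).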
Example 5
    def _load_models(self, process_id, sess):
        """
        Loads models and returns them
        """
        logging.debug("Process '%s' - Loading models\n" % (process_id))

        import tensorflow as tf
        models = []
        for i, options in enumerate(self._options):
            with tf.variable_scope("model%d" % i) as scope:
                model, saver = create_model(options, sess, ensemble_scope=scope)
                models.append(model)

        logging.info("NOTE: Length of translations is capped to {}".format(self._options[0].translation_maxlen))
        return models
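A standalone sketch of the scoping mechanism this relies on (TF 1.x; scope names are illustrative): giving each ensemble member its own variable_scope keeps otherwise identically named variables distinct within one graph and one session.

import tensorflow as tf

for i in range(2):
    with tf.variable_scope("model%d" % i):
        v = tf.get_variable("w", shape=[3])
        print(v.name)  # "model0/w:0", then "model1/w:0"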