Example #1
    # `params`, `encoder`, `decoder` and `dtype` are provided by the
    # enclosing infer_fn / module scope.
    def encoding_fn(source):
        with tf.variable_scope(
                params.scope_name or "model",
                reuse=tf.AUTO_REUSE,
                dtype=tf.as_dtype(dtype.floatx()),
                custom_getter=dtype.float32_variable_storage_getter):
            state = encoder(source, params)
            state["decoder"] = {"state": state["decoder_initializer"]}
            return state

    def decoding_fn(target, state, time):
        with tf.variable_scope(
                params.scope_name or "model",
                reuse=tf.AUTO_REUSE,
                dtype=tf.as_dtype(dtype.floatx()),
                custom_getter=dtype.float32_variable_storage_getter):
            if params.search_mode == "cache":
                # cached decoding: reuse the decoder state carried in `state`
                step_loss, step_logits, step_state, _ = decoder(
                    target, state, params)
            else:
                # uncached decoding: re-run the encoder and decoder from
                # scratch on the full target prefix at this step
                estate = encoder(state, params)
                estate['dev_decode'] = True
                _, step_logits, _, _ = decoder(target, estate, params)
                step_state = state

            return step_logits, step_state

    return encoding_fn, decoding_fn


# register the model under a unique name
model.model_register("rnnsearch_deepatt", train_fn, score_fn, infer_fn)
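
For orientation, here is a minimal sketch of how a search routine might consume the (encoding_fn, decoding_fn) pair built above. The greedy loop, the start-token id, and the assumption that decoding_fn returns per-step logits of shape [batch, vocab] are hypothetical; the library's actual beam search is more involved.

import tensorflow as tf

def greedy_decode(encoding_fn, decoding_fn, source, max_steps=50, bos_id=1):
    # Encode the source once; the returned dict carries the decoder state.
    state = encoding_fn(source)
    batch = tf.shape(source)[0]
    # Start every hypothesis from an assumed beginning-of-sequence token.
    target = tf.fill([batch, 1], bos_id)
    outputs = []
    for time in range(max_steps):
        # One decoding step: next-token logits plus the updated state.
        step_logits, state = decoding_fn(target, state, time)
        next_token = tf.argmax(step_logits, axis=-1, output_type=tf.int32)
        outputs.append(next_token)
        # Feed the prediction back in as the next decoder input.
        target = tf.reshape(next_token, [batch, 1])
    # Stack the per-step predictions into a [batch, max_steps] tensor.
    return tf.stack(outputs, axis=1)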
Example #2
    def encoding_fn(source):
        with tf.variable_scope(
                params.scope_name or "model",
                reuse=tf.AUTO_REUSE,
                dtype=tf.as_dtype(dtype.floatx()),
                custom_getter=dtype.float32_variable_storage_getter):
            state = encoder(source, params)
            state["decoder"] = {"state": state["decoder_initializer"]}
            return state

    def decoding_fn(target, state, time):
        with tf.variable_scope(
                params.scope_name or "model",
                reuse=tf.AUTO_REUSE,
                dtype=tf.as_dtype(dtype.floatx()),
                custom_getter=dtype.float32_variable_storage_getter):
            if params.search_mode == "cache":
                # cached decoding: expose the current time step to the
                # decoder while reusing the cached decoder state
                state['time'] = time
                step_loss, step_logits, step_state, _ = decoder(
                    target, state, params)
                del state['time']
            else:
                # uncached decoding: re-run the encoder and decoder from
                # scratch on the full target prefix at this step
                estate = encoder(state, params)
                estate['dev_decode'] = True
                _, step_logits, _, _ = decoder(target, estate, params)
                step_state = state

            return step_logits, step_state

    return encoding_fn, decoding_fn


# register the model under a unique name
model.model_register("transformer_aan", train_fn, score_fn, infer_fn)
Example #3
    def encoding_fn(source):
        with tf.variable_scope(
                params.scope_name or "model",
                reuse=tf.AUTO_REUSE,
                dtype=tf.as_dtype(dtype.floatx()),
                custom_getter=dtype.float32_variable_storage_getter):
            state = encoder(source, params)
            state["decoder"] = {"state": state["decoder_initializer"]}
            return state

    def decoding_fn(target, state, time):
        with tf.variable_scope(
                params.scope_name or "model",
                reuse=tf.AUTO_REUSE,
                dtype=tf.as_dtype(dtype.floatx()),
                custom_getter=dtype.float32_variable_storage_getter):
            if params.search_mode == "cache":
                step_loss, step_logits, step_state, _ = decoder(
                    target, state, params)
            else:
                estate = encoder(state, params)
                estate['dev_decode'] = True
                _, step_logits, _, _ = decoder(target, estate, params)
                step_state = state

            return step_logits, step_state

    return encoding_fn, decoding_fn


# register the model under a unique name
model.model_register("deepnmt", train_fn, score_fn, infer_fn)
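
The model.model_register calls above tie each set of graph-building functions to a unique name. The registry below is a hypothetical stand-in used only to illustrate the idea; the actual model module in the source tree may differ.

# Hypothetical stand-in for the `model` module's registry.
_MODEL_REGISTRY = {}

def model_register(name, train_fn, score_fn, infer_fn):
    # Each model name may be registered only once.
    if name in _MODEL_REGISTRY:
        raise ValueError("model '{}' is already registered".format(name))
    _MODEL_REGISTRY[name] = {
        "train_fn": train_fn,
        "score_fn": score_fn,
        "infer_fn": infer_fn,
    }

def get_model(name):
    # Resolve a registered model by its unique name.
    if name not in _MODEL_REGISTRY:
        raise ValueError("unknown model '{}'".format(name))
    return _MODEL_REGISTRY[name]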