def model_for(lstm, semantic_dir=None, hyper_parameters=None, extra=None,
              model_fn=lambda scope, hyper_parameters, extra, case_labels,
                              hidden_vector, word_labels, output_labels: None):
    if semantic_dir is None:
        assert hyper_parameters is not None and extra is not None, "one of (semantic_dir) or (hyper_parameters, extra) must be specified"
    else:
        assert hyper_parameters is None and extra is None, "both (semantic_dir) and (hyper_parameters, extra) cannot be specified"
        hyper_parameters = get_hyper_parameters(semantic_dir)
        extra = get_extra(semantic_dir)

    case_labels = mlbase.Labels(lstm.keys())
    hidden_vector = mlbase.VectorField(
        max(lstm.hyper_parameters.width,
            lstm.hyper_parameters.embedding_width))
    return model_fn("sem", hyper_parameters, extra, case_labels, hidden_vector,
                    lstm.word_labels, lstm.output_labels)
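
# Hypothetical usage sketch, not from the original project: `lstm` is assumed to
# be an already-trained wrapper exposing keys(), hyper_parameters, word_labels
# and output_labels as used above, and the mlbase/ffnn imports are the ones shown
# in Example #2 below.  The model_fn mirrors the ffnn.Model constructor from that
# example; the hyper_parameters/extra arguments are ignored here for brevity.
def build_sem_model(scope, hyper_parameters, extra, case_labels, hidden_vector,
                    word_labels, output_labels):
    sem_input = mlbase.ConcatField([case_labels, hidden_vector])
    return ffnn.Model(scope,
                      ffnn.HyperParameters().width(10).layers(1),
                      sem_input, output_labels, mlbase.SINGLE_LABEL)

sem_model = model_for(lstm, semantic_dir="semantic-dir",  # hypothetical directory
                      model_fn=build_sem_model)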
Example #2
import os

from ml import nn as ffnn
from ml import base as mlbase
from pytils import adjutant
from pytils.log import setup_logging, user_log

setup_logging(".%s.log" % os.path.splitext(os.path.basename(__file__))[0],
              True, False, True)

KINDS = ["outputs", "cells"]
LAYERS = 2
WIDTH = 5
words = set(["abc", "def", "ghi"])
kind_labels = mlbase.Labels(set(KINDS))
layer_labels = mlbase.Labels(set(range(LAYERS)))
activation_vector = mlbase.VectorField(WIDTH)
predictor_input = mlbase.ConcatField(
    [kind_labels, layer_labels, activation_vector])
predictor_output = mlbase.Labels(words)
predictor = ffnn.Model("predictor",
                       ffnn.HyperParameters().width(10).layers(1),
                       predictor_input, predictor_output, mlbase.SINGLE_LABEL)

# Each training pair maps a (kind, layer, activation vector) input to a target
# distribution over the candidate words.
data = [
    mlbase.Xy(("outputs", 0, [.1, .2, .3, .4, .5]), {
        "abc": .6,
        "def": .2,
        "ghi": .2
    }),
    mlbase.Xy(("outputs", 1, [.1, .2, .3, .4, .5]), {

# Helper functions from the surrounding project; they rely on module-level
# imports/constants not shown here (pickler, nlp, WORDS, OUTPUTS, START, END, UNKNOWN).
def get_words(data_dir):
    # Persisted word vocabulary, plus the blank label and an unknown fallback.
    words = set(pickler.load(os.path.join(data_dir, WORDS)))
    return mlbase.Labels(words.union(set([mlbase.BLANK])), unknown=nlp.UNKNOWN)

def get_outputs(data_dir):
    outputs = set(pickler.load(os.path.join(data_dir, OUTPUTS)))
    return mlbase.Labels(outputs)

def vocabulary(words):
    # Word labels plus sentence start/end markers and an unknown fallback.
    return mlbase.Labels(words.union(set([START, END])), unknown=UNKNOWN)
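
# Hypothetical usage sketch, not from the original project: the directory path
# and field sizes are illustrative, and the mlbase/ffnn imports from Example #2
# are assumed.  The Labels built by these helpers can serve directly as the
# input and output fields of a feed-forward model, mirroring the predictor above.
word_labels = get_words("./data")        # hypothetical data directory
output_labels = get_outputs("./data")
classifier = ffnn.Model("classifier",
                        ffnn.HyperParameters().width(10).layers(1),
                        mlbase.ConcatField([word_labels, mlbase.VectorField(5)]),
                        output_labels, mlbase.SINGLE_LABEL)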