Example #1
    def predict_distributions(self, word, points):
        # Build one Xy query per point; no annotation (target) is needed at
        # prediction time.
        annotation = None
        keys = []
        xys = []

        for key, point in points.items():
            keys += [key]
            xys += [mlbase.Xy(self.as_input(key, point), annotation)]

        results, _ = self.sem.evaluate(xys)

        distribution_predictions = {}

        for i, key in enumerate(keys):
            # Order the predicted labels by probability, highest first, and
            # keep only the top_k of them.
            distribution = results[i].distribution()
            ordered_predictions = [
                item[0] for item in sorted(distribution.items(),
                                           key=lambda item: item[1],
                                           reverse=True)
            ]
            distribution_predictions[key] = {
                prediction: distribution[prediction]
                for prediction in ordered_predictions[:self.top_k]
            }

        return distribution_predictions
Example #2
def evaluate_sequence(self, xs, handle_unknown=False, instrument_names=[]):
    # Run the full input sequence through the network and decode the
    # distribution predicted at the final timestep.
    feed = self.get_testing_feed([mlbase.Xy([(x, None) for x in xs], [])])
    instruments = self.get_instruments(instrument_names)
    distributions, *instrument_values = self.session.run(
        [self.output_distributions] + instruments, feed_dict=feed)
    assert len(distributions) == len(xs), "%d != %d" % (len(distributions), len(xs))
    assert len(distributions[-1]) == 1, "%d != 1" % (len(distributions[-1]))
    distribution = distributions[-1][0]
    result = Result(self.output_labels.vector_decode(distribution),
                    self.output_labels.vector_decode_distribution(distribution),
                    self.output_labels.encoding())
    return result, {name: instrument_values[i] for i, name in enumerate(instrument_names)}


def stream_fn(key):
    for hidden_state in states.stream_hidden_test(states_dir, key):
        yield mlbase.Xy(as_input(key, hidden_state),
                        hidden_state.annotation)


def test_xys():
    for key, hidden_state in states.random_stream_hidden_states(
            states_dir, "test", key_set):
        yield mlbase.Xy(as_input(key, hidden_state),
                        hidden_state.annotation)


def train_xys():
    for key, hidden_state in states.random_stream_hidden_states(
            states_dir, "train", key_set, sample_rate=0.25):
        yield mlbase.Xy(as_input(key, hidden_state),
                        hidden_state.annotation)
Example #6
# Note: KINDS and LAYERS are defined elsewhere in the project; the values
# below are illustrative stand-ins so the snippet is self-contained.
KINDS = ["outputs", "cells"]
LAYERS = 2

WIDTH = 5
words = set(["abc", "def", "ghi"])
kind_labels = mlbase.Labels(set(KINDS))
layer_labels = mlbase.Labels(set(range(LAYERS)))
activation_vector = mlbase.VectorField(WIDTH)
predictor_input = mlbase.ConcatField(
    [kind_labels, layer_labels, activation_vector])
predictor_output = mlbase.Labels(words)
predictor = ffnn.Model("predictor",
                       ffnn.HyperParameters().width(10).layers(1),
                       predictor_input, predictor_output, mlbase.SINGLE_LABEL)

data = [
    mlbase.Xy(("outputs", 0, [.1, .2, .3, .4, .5]), {
        "abc": .6,
        "def": .2,
        "ghi": .2
    }),
    mlbase.Xy(("outputs", 1, [.1, .2, .3, .4, .5]), {
        "abc": .1,
        "def": .6,
        "ghi": .3
    }),
    #mlbase.Xy(("outputs", 1, [.1, .2, .3, .4, .5]), {"abc": .3, "def": .3, "ghi": .4}),
    #mlbase.Xy(("outputs", 1, [.5, .4, .3, .2, .1]), {"abc": .4, "def": .4, "ghi": .2}),
    #mlbase.Xy(("cells", 0, [.1, .2, .3, .4, .5]), {"abc": .2, "def": .4, "ghi": .4}),
    #mlbase.Xy(("cells", 0, [.5, .4, .3, .2, .1]), {"abc": .6, "def": .2, "ghi": .2}),
    #mlbase.Xy(("cells", 1, [.1, .2, .3, .4, .5]), {"abc": .35, "def": .35, "ghi": .3}),
    #mlbase.Xy(("cells", 1, [.5, .4, .3, .2, .1]), {"abc": .3, "def": .3, "ghi": .4}),
]
Example #7
def _xy_lm(data):
    # data is a sequence: [(word1, pos1), .., (wordN, posN)]
    if len(data) > 1:
        return mlbase.Xy(data[:-1], data[1:])
    else:
        return None
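
To make the pairing concrete, here is a hypothetical call (the tokens and POS tags are invented for illustration); the input is everything but the last element and the target is the same sequence shifted forward by one:

xy = _xy_lm([("the", "DT"), ("cat", "NN"), ("sat", "VBD")])
# Equivalent to:
#   mlbase.Xy([("the", "DT"), ("cat", "NN")],
#             [("cat", "NN"), ("sat", "VBD")])
# A sequence with a single element returns None instead.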
Example #8
def _xy_sa(data):
    # data is a tuple: ([(word1, pos1), .., (wordN, posN)], sentiment)
    return mlbase.Xy(data[0], data[1])
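
Similarly, a hypothetical sentiment-analysis call (tokens and label invented for illustration) unpacks the tuple into the token sequence as input and the sentiment as target:

xy = _xy_sa(([("great", "JJ"), ("movie", "NN")], "positive"))
# Equivalent to: mlbase.Xy([("great", "JJ"), ("movie", "NN")], "positive")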