Example No. 1
def main():
    global activation_data
    activation_data_file = os.path.join(RESUME_DIR, "activation_data.pickle")

    if os.path.exists(activation_data_file):
        activation_data = pickler.load(activation_data_file)
    else:
        raise ValueError("missing activation data: %s" % activation_data_file)

    user_input = ""

    while not user_input.startswith("quit"):
        user_input = input(
            "enter next search (part,layer|axis:target_value,..): ")

        if not user_input.startswith("quit"):
            query = None

            try:
                part, layer, query = parse(user_input)
                print("(%s, %s, %s)" % (part, layer, query))
            except WriteLast:
                # Dump quantile slices of the previous successful search's
                # results (part, layer, result, q10/q25/q50 are left over
                # from the prior loop iteration).
                pickler.dump((part, layer, result[:q10]), "result-q10.pickle")
                pickler.dump((part, layer, result[:q25]), "result-q25.pickle")
                pickler.dump((part, layer, result[:q50]), "result-q50.pickle")
            except Exception as e:
                print(e)
                print("error interpreting: %s" % user_input)

            if query is not None:
                result, q10, q25, q50 = find_closest(part, layer, query)
                print("found %d: " % len(result))

                for r in result[:TOP]:
                    print(r)
        else:
            # Exit path - don't do anything
            pass

    return 0
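For context, the interactive prompt above expects queries of the form part,layer|axis:target_value,.. so an input such as "embedding,1|3:0.25,7:-0.5" would, under that reading, search part "embedding", layer 1, for points near 0.25 on axis 3 and -0.5 on axis 7. The exact grammar lives in the project's parse helper, which is not shown here, so this sample input is only an illustration.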
Example No. 2
def set_buckets(reduction_dir, key, learned_buckets, fixed_buckets):
    learned_path = os.path.join(reduction_dir, LEARNED_BUCKETS, key)
    fixed_path = os.path.join(reduction_dir, FIXED_BUCKETS, key)
    pickler.dump(list(learned_buckets.items()), learned_path)
    pickler.dump(list(fixed_buckets.items()), fixed_path)
Example No. 3
def set_hidden_states(states_dir, kind, key, states):
    pickler.dump(states,
                 os.path.join(states_dir, _folder(kind) + "." + key),
                 converter=lambda hs: (hs.word, hs.point, hs.annotation))
Example No. 4
def set_activation_states(states_dir, key, states):
    pickler.dump(states,
                 os.path.join(states_dir, STATES_ACTIVATION + "." + key),
                 converter=lambda _as: (_as.sequence, _as.index, _as.point))
Example No. 5
def set_outputs(data_dir, outputs, sort_key=lambda item: item):
    pickler.dump(sorted(outputs, key=sort_key), os.path.join(data_dir, OUTPUTS))
Example No. 6
def set_output_distribution(data_dir, distribution):
    pickler.dump(list(distribution.items()), os.path.join(data_dir, OUTPUT_DISTRIBUTION))
Example No. 7
def set_words(data_dir, words):
    pickler.dump(list(words), os.path.join(data_dir, WORDS))
Example No. 8
def set_pos(data_dir, pos_tags):
    pickler.dump(list(pos_tags), os.path.join(data_dir, POS_TAGS))
Example No. 9
def set_pos_mapping(data_dir, pos_mapping):
    pickler.dump(list(pos_mapping.items()), os.path.join(data_dir, POS_MAPPING))
Example No. 10
def _set_data(data_dir, pairs, kind):
    target_file = (XYS_TRAIN if kind == "train"
                   else XYS_TEST if kind == "test"
                   else XYS_VALIDATION)
    pickler.dump(pairs, os.path.join(data_dir, target_file))
import sys

from nnwd import pickler

with open(sys.argv[1], "r") as fh:
    words = set()
    sequences = []

    # Each non-empty line becomes a sequence of (word, annotation) pairs.
    for line in fh:
        if line.strip() != "":
            sequence = []

            for word in line.strip().split(" "):
                sequence.append((word, None))
                words.add(word)

            sequences.append(sequence)

    pickler.dump(sequences, sys.argv[2])
    pickler.dump(list(words), "words")
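As a usage sketch (the script file name below is hypothetical, since the listing does not give one), the conversion script above would be run as:

python corpus_to_sequences.py corpus.txt sequences.pickle

It splits each non-empty line of corpus.txt on spaces, pickles the resulting (word, None) sequences to sequences.pickle, and writes the vocabulary to a file named "words" in the working directory.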