def learning(weight):
    """One plain-perceptron training pass over labeled sentences on stdin.

    Each stdin line carries a gold label and a sentence; whenever the
    current weights misclassify the sentence, `weight` is updated in place.
    """
    for raw in sys.stdin:
        gold, sentence = get_label_and_sentence(raw)
        features = cf.create_features(sentence)
        guess = po.predict_one(weight, features)
        # Mistake-driven update: only touch the weights on a wrong guess.
        if int(guess) != int(gold):
            update_weights(weight, features, int(gold))
def learning(weight):
    """Online perceptron pass: read labeled lines from stdin and adjust
    `weight` in place on every misclassification."""
    while True:
        line = sys.stdin.readline()
        if line == "":
            # readline() returns "" only at EOF.
            break
        label, sent = get_label_and_sentence(line)
        phi = cf.create_features(sent)
        if int(po.predict_one(weight, phi)) != int(label):
            update_weights(weight, phi, int(label))
def average_learning(weight):
    """Averaged-perceptron pass: train on stdin lines, then replace
    `weight` with the running average of the weight vector over all
    processed lines (averaging reduces overfitting to late examples)."""
    seen = 0.0
    running_mean = defaultdict(lambda: 0.0)
    for raw in sys.stdin:
        gold, sentence = get_label_and_sentence(raw)
        features = cf.create_features(sentence)
        guess = po.predict_one(weight, features)
        if int(guess) != int(gold):
            update_weights(weight, features, int(gold))
        seen += 1.0
        # Incremental mean over iterations; features first seen late are
        # implicitly averaged against earlier zeros, as required.
        for name, value in weight.items():
            running_mean[name] = (running_mean[name] * (seen - 1.0) + value) / seen
    # Overwrite the trained weights with their averaged counterparts.
    for name in weight:
        weight[name] = running_mean[name]
def average_learning(weight):
    """Train an averaged perceptron on labeled stdin lines.

    After the pass, `weight` holds the mean of the weight vectors observed
    after each input line (computed incrementally, one line at a time).
    """
    count = 0.0
    mean = defaultdict(lambda: 0.0)
    while True:
        line = sys.stdin.readline()
        if line == "":
            break
        label, sent = get_label_and_sentence(line)
        phi = cf.create_features(sent)
        if int(po.predict_one(weight, phi)) != int(label):
            update_weights(weight, phi, int(label))
        count += 1.0
        # Fold the current weight vector into the running mean.
        for key, value in weight.items():
            mean[key] = (mean[key] * (count - 1.0) + value) / count
    # Publish the averaged weights back into the caller's dict.
    for key in weight:
        weight[key] = mean[key]
def predict_all(model_file):
    weight = defaultdict(lambda : 0.0)

    # load model_file
    fin = open(model_file)
    for line in iter(fin.readline, ""):
        parts = line.rstrip("\n").split()
        value = parts.pop()
        name  = " ".join(parts)
        weight[name] = float(value)
    fin.close()

    # predict all
    for line in iter(sys.stdin.readline, ""):
        phi = cf.create_features(line)
        y   = po.predict_one(weight, phi)
        print y
# Example #6
# 0
def predict_all(model_file):
    weight = defaultdict(lambda : 0.0)

    # load model_file
    fin = open(model_file)
    for line in iter(fin.readline, ""):
        parts = line.rstrip("\n").split()
        value = parts.pop()
        name  = " ".join(parts)
        weight[name] = float(value)
    fin.close()

    # predict all
    for line in iter(sys.stdin.readline, ""):
        phi = cf.create_features(line)
        y   = po.predict_one(weight, phi)
        print int(y)
def predict_nn(network, phi):
    """Feed the feature dict `phi` through a layered perceptron network.

    `network` is a sequence of (name, layer, weight) triples; each
    perceptron reads the outputs of the layer below it. Returns a pair
    (output of the final perceptron, list of per-layer output dicts).
    """
    depth = max([1] + [layer for (_, layer, _) in network])

    # outputs[0] holds the raw input features; one dict per layer above.
    outputs = [{} for _ in range(depth + 1)]
    outputs[0] = phi

    # Evaluate every perceptron against the layer beneath it.
    for (name, layer, weight) in network:
        outputs[layer][name] = po.predict_one(weight, outputs[layer - 1])

    # NOTE(review): assumes the final perceptron is named len(network)-1,
    # which holds only if names are sequential integers — confirm callers.
    final_name = len(network) - 1
    return (outputs[-1][final_name], outputs)
def predict_nn(network, phi):
    """Run a multi-layer perceptron network over feature dict `phi`.

    Returns (prediction of the last perceptron, per-layer output dicts);
    layer 0 of the returned list is `phi` itself.
    """
    top = 1
    for entry in network:
        if entry[1] > top:
            top = entry[1]

    y = [{} for _ in range(top + 1)]
    y[0] = phi

    for (name, layer, weight) in network:
        below = y[layer - 1]
        y[layer][name] = po.predict_one(weight, below)

    # The network's overall output lives in the topmost layer, keyed by
    # the (assumed integer-sequential) name of the final perceptron.
    last = len(network) - 1
    return (y[-1][last], y)