Example 1
def square():
    # Each row is [question, branch, branch]: non-negative entries are
    # question indices, while ~k (bitwise NOT, i.e. -k - 1) marks answer k.
    graph = [
        [0, 1, 2],
        [1, ~0, 3],
        [2, ~1, 3],
        [3, ~1, ~0],
    ]
    NUM_QUESTION = len(set(g[0] for g in graph))
    assert NUM_QUESTION == 4
    NUM_ANSWER = len(set(~g[i] for g in graph for i in [1, 2] if g[i] < 0))
    assert NUM_ANSWER == 2

    data = graph2data(graph, 0, NUM_QUESTION, NUM_ANSWER)
    NUM_DATA = len(data)
    mat_phi = np.array([d[0] for d in data])
    mat_t = np.array([d[1] for d in data])

    weights = [logreg.learn((mat_phi, mat_t[:, i])) for i in range(NUM_ANSWER)]

    for d in mat_phi:
        probs = logreg.get_multi_prob(d, weights)
        print d
        print ' '.join('%0.2f' % p for p in probs)

    print weights
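
Every example calls into a small logreg module that is not shown in these snippets. Purely as an illustrative stand-in (the real module may differ in both signature and algorithm), learn could be a plain batch-gradient-descent binary logistic regression that takes a (features, targets) pair, with an optional second argument for the number of steps, and get_multi_prob could evaluate one sigmoid per weight vector:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def learn(data, num_steps=100, learning_rate=0.1):
    "Hypothetical stand-in for logreg.learn, not the original implementation."
    mat_phi, t = data
    w = np.zeros(mat_phi.shape[1])
    for _ in range(num_steps):
        y = sigmoid(mat_phi.dot(w))
        # gradient of the cross-entropy loss with respect to the weights
        w -= learning_rate * mat_phi.T.dot(y - t)
    return w

def get_multi_prob(phi, weights):
    "Hypothetical stand-in for logreg.get_multi_prob: one sigmoid per answer."
    return [sigmoid(np.dot(w, phi)) for w in weights]

With stand-ins like these, the weights = [logreg.learn((mat_phi, mat_t[:, i])) ...] loop that every example runs is simply one-vs-rest training: one independent binary classifier per answer column of mat_t.
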
Example 2
def sampling():
    "sampling from all data"
    SIZE = 10
    NUM_QUESTION = SIZE
    NUM_ANSWER = SIZE + 1
    graph = generate_tree_graph(SIZE)
    data = list(graph2all_data(graph))
    from random import shuffle
    shuffle(data)
    data = data[:11]  # keep only 11 randomly chosen rows
    mat_phi = np.array([d[0] for d in data])
    print mat_phi
    mat_t = np.array([d[1] for d in data])
    print mat_t

    weights = [
        logreg.learn((mat_phi, mat_t[:, i])) for i in range(mat_t.shape[1])
    ]

    for d in mat_phi:
        probs = logreg.get_multi_prob(d, weights)
        print d
        print ' '.join('%0.2f' % p for p in probs)

    print_mat(weights)
    print correct_ratio(NUM_QUESTION, NUM_ANSWER, weights)
    return weights
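
generate_tree_graph is also defined elsewhere. The next example (change_scale) builds what looks like the same structure by hand, a chain of SIZE questions ending in SIZE + 1 answer leaves, so a minimal, purely hypothetical stand-in could be:

def generate_tree_graph(size):
    "Hypothetical stand-in: a chain of questions ending in answer leaves."
    graph = [[i, ~i, i + 1] for i in range(size)]
    graph[-1][-1] = ~size  # the last question branches into two answers
    return graph
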
Example 3
def change_scale(size=10, add_offset_column=True):
    SIZE = size  # use the size argument instead of a hard-coded 10
    NUM_QUESTION = SIZE
    NUM_ANSWER = SIZE + 1
    # chain graph: question i branches to answer i or on to question i + 1;
    # the last question branches to answer SIZE - 1 or answer SIZE
    graph = [[i, ~i, i + 1] for i in range(SIZE)]
    graph[-1][-1] = ~SIZE
    data = list(graph2all_data(graph, add_offset_column=add_offset_column))
    mat_phi = np.array([d[0] for d in data])
    # blow up the scale of two feature columns to see how the learner copes
    # with features on very different scales
    mat_phi[:, 1] *= 10
    mat_phi[:, 3] *= 100
    print mat_phi
    mat_t = np.array([d[1] for d in data])
    print mat_t

    weights = [
        logreg.learn((mat_phi, mat_t[:, i]), 1000)
        for i in range(mat_t.shape[1])
    ]

    for d in mat_phi:
        probs = logreg.get_multi_prob(d, weights)
        print d
        print ' '.join('%0.2f' % p for p in probs)

    print_mat(weights)
    print correct_ratio(NUM_QUESTION,
                        NUM_ANSWER,
                        weights,
                        add_offset_column=add_offset_column)
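
change_scale appears to probe how training behaves when feature columns live on very different scales. If the unequal scales turn out to hurt convergence, one common remedy, not part of these snippets, is to standardize each column before learning, along these lines:

def standardize_columns(mat):
    "z-score every column; illustrative helper, not from the original code."
    std = mat.std(axis=0)
    std[std == 0] = 1.0  # leave constant columns unscaled
    return (mat - mat.mean(axis=0)) / std

Something like mat_phi = standardize_columns(mat_phi.astype(float)) before the learning loop would put all columns back on a comparable scale.
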
Example 4
def one_way_tree2():
    SIZE = 10
    NUM_QUESTION = SIZE - 1
    NUM_ANSWER = SIZE
    NUM_DATA = SIZE
    mat_phi = np.zeros((NUM_DATA, NUM_QUESTION))
    mat_t = np.zeros((NUM_DATA, NUM_ANSWER))
    # datum i: -1 for every question before i, +1 for question i itself
    # (the last datum gets no +1); its target is one-hot for answer i
    for i in range(NUM_DATA):
        mat_t[i][i] = 1.0
        for j in range(i):
            mat_phi[i, j] = -1.0
        if i < NUM_QUESTION:
            mat_phi[i, i] = +1.0

    weights = [logreg.learn((mat_phi, mat_t[:, i])) for i in range(NUM_ANSWER)]

    for d in mat_phi:
        probs = logreg.get_multi_prob(d, weights)
        print d
        print ' '.join('%0.2f' % p for p in probs)

    # exhaustive check over every possible answer pattern: the expected
    # answer is the first question answered +1, or the last answer if
    # every question was answered -1
    from itertools import product
    qs = product([-1, 1], repeat=NUM_QUESTION)
    num_ok = 0
    for q in qs:
        if 1 not in q:
            a = NUM_ANSWER - 1
        else:
            a = q.index(1)
        probs = logreg.get_multi_prob(np.array(q), weights)
        if np.array(probs).argmax() == a:
            num_ok += 1
    print num_ok / (2.0 ** NUM_QUESTION)
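
The exhaustive loop above is essentially what the correct_ratio calls in the sampling and change_scale examples have to compute. A hypothetical sketch of that helper, using the same "first +1 wins, otherwise the last answer" rule, calling logreg.get_multi_prob exactly as the snippets do, and ignoring the add_offset_column flag whose behaviour isn't visible here:

from itertools import product
import numpy as np

def correct_ratio(num_question, num_answer, weights):
    "Hypothetical sketch: accuracy over every possible +/-1 answer pattern."
    num_ok = 0
    for q in product([-1, 1], repeat=num_question):
        expected = q.index(1) if 1 in q else num_answer - 1
        probs = logreg.get_multi_prob(np.array(q), weights)
        if np.array(probs).argmax() == expected:
            num_ok += 1
    return num_ok / (2.0 ** num_question)
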
Example 5
def with_all_data(graph=graph):
    "with all data"
    data = list(graph2all_data(graph))
    mat_phi = np.array([d[0] for d in data])
    print mat_phi
    mat_t = np.array([d[1] for d in data])
    print mat_t

    weights = [logreg.learn((mat_phi, mat_t[:, i])) for i in range(mat_t.shape[1])]

    for d in mat_phi:
        probs = logreg.get_multi_prob(d, weights)
        print d
        print ' '.join('%0.2f' % p for p in probs)

    print_mat(weights)
    return weights
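
Because with_all_data takes the graph as an argument (defaulting to a module-level graph that the snippets don't show), it can be pointed at any of the graphs above; for instance, the square graph from Example 1:

# illustrative call, output omitted
square_graph = [
    [0, 1, 2],
    [1, ~0, 3],
    [2, ~1, 3],
    [3, ~1, ~0],
]
weights = with_all_data(graph=square_graph)
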
Example 6
def one_way_tree():
    SIZE = 4
    NUM_QUESTION = SIZE - 1
    NUM_ANSWER = SIZE
    NUM_DATA = SIZE
    mat_phi = np.zeros((NUM_DATA, NUM_QUESTION))
    mat_t = np.zeros((NUM_DATA, NUM_ANSWER))
    for i in range(NUM_DATA):
        mat_t[i][i] = 1.0
        for j in range(i):
            mat_phi[i, j] = -1.0
        if i < NUM_QUESTION:
            mat_phi[i, i] = +1.0

    weights = [logreg.learn((mat_phi, mat_t[:, i])) for i in range(NUM_ANSWER)]

    for d in mat_phi:
        probs = logreg.get_multi_prob(d, weights)
        print d
        print ' '.join('%0.2f' % p for p in probs)

    print_mat(np.array(weights))
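
For SIZE = 4 the construction loop fills the matrices as follows (values shown as integers for readability): each row of mat_phi answers -1 to every earlier question and +1 to its own question, the last row answers -1 to all three questions, and mat_t is simply the 4x4 identity.

mat_phi = [[ 1,  0,  0],        mat_t = [[1, 0, 0, 0],
           [-1,  1,  0],                 [0, 1, 0, 0],
           [-1, -1,  1],                 [0, 0, 1, 0],
           [-1, -1, -1]]                 [0, 0, 0, 1]]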