Exemplo n.º 1
0
 def test_constraint(self):
     """Solve the query `b` against a program with evidence constraints.

     The program makes `b` true if a(0.2) or a(0.8) holds, and constrains
     the joint via evidence: aux2 (the disjunction) is true, aux (the
     conjunction) is false.
     """
     model_string = "nn(fc,[X])::a(X).\nb:-a(0.2);a(0.8).\naux:-a(0.2),a(0.8).\naux2:-a(0.2);a(0.8).\nevidence(aux2).\nevidence(aux,false)."
     fc = FC(10, 2)
     # NOTE(review): the third argument is the module-level name `test`,
     # presumably a callable producing network outputs — confirm it is
     # defined elsewhere in this file and not a scraping artifact.
     net = Network(fc, 'fc', test)
     model = Model(model_string, [net])
     query = Term('b')
     solution = model.solve(query)
     print(solution)
Exemplo n.º 2
0
 def test_test(self):
     """Run a query in test mode against a two-output neural AD predicate."""
     program = "nn(fc,[X,Y],Z,[0,1]) :: a(X,Y,Z).\nb(X,Y,Z) :- a(X,Y,Z)."
     # Dummy network function: always returns the fixed distribution [0.2, 0.8].
     dummy_outputs = lambda a, b, c: Variable(torch.FloatTensor([0.2, 0.8]))
     network = Network(FC(10, 2), 'fc', dummy_outputs)
     goal = Term('b', Constant("string with 's"), Constant(3), Var('X'))
     solution = Model(program, [network]).solve(goal, test=True)
     print(solution)
Exemplo n.º 3
0
    def test_indirect_evidence(self):
        """Query a digit predicate from a two-digit addition program.

        NOTE(review): the evidence line and the addition query are
        commented out, so only the direct digit(2,2) query is exercised.
        """
        model_string = """nn(fc,[X],Y,[0,1,2,3,4,5,6,7,8,9])::digit(X,Y).
addition(X,Y,Z) :- digit(X,X2),digit(Y,Y2),Z is X2+Y2."""
        #evidence(digit(2,2))."""
        #model_string = "nn(fc,[X])::a(X).\nb:-a(0.2).\nc:-b.\nevidence(b)."
        fc = FC(10, 10)
        # Network function returns a uniform distribution over the 10 digits.
        net = Network(fc, 'fc',
                      lambda a, b: Variable(torch.FloatTensor([0.1] * 10)))
        model = Model(model_string, [net])
        query = Term('digit', Constant(2), Constant(2))
        #query = Term('addition',Constant(2),Constant(3),Constant(5))
        solution = model.solve(query)
        print(solution)
Exemplo n.º 4
0
    def test_save_load(self):
        """Round-trip model state through save_state/load_state.

        Saves the current state, zeroes both the probabilistic parameters
        and the network weights, reloads the saved state, and asserts
        everything is restored exactly.

        Fix: the original test left 'test_save.mdl' behind on disk; the
        file is now removed in a finally block.
        """
        import os

        model_string = "nn(fc,[X,Y],Z,[0,1]) :: a(X,Y,Z).\nt(0.5)::b."
        fc = FC(10, 2)
        net = Network(fc, 'fc',
                      lambda a, b, c: Variable(torch.FloatTensor([0.2, 0.8])))
        model = Model(model_string, [net])
        state_file = 'test_save.mdl'
        model.save_state(state_file)
        try:
            # Remember the original values, then clobber them so a
            # successful load is distinguishable from a no-op.
            orig_params = dict()
            for p in model.parameters:
                orig_params[p] = model.parameters[p]
                model.parameters[p] = 0
            original_net_params = list()
            for param in fc.parameters():
                original_net_params.append(param.data.clone())
                param.data.zero_()

            model.load_state(state_file)

            for p in model.parameters:
                self.assertEqual(model.parameters[p], orig_params[p])
            for i, param in enumerate(fc.parameters()):
                self.assertTrue(torch.equal(param.data, original_net_params[i]))
        finally:
            if os.path.exists(state_file):
                os.remove(state_file)
Exemplo n.º 5
0
    def test_fact_instantiate(self):
        """Instantiating a plain (domain-free) neural fact should leave the
        term unchanged, modulo whitespace formatting of the rule body."""
        fc = FC(10, 2)
        net = Network(fc, 'fc', None)
        # (input line, expected printed form) pairs; each instantiate is
        # chained on the network returned by the previous one.
        cases = [
            ('nn(fc,[X,Y])::term(X,Y) :- a(Y).',
             'nn(fc,[X,Y])::term(X,Y) :-  a(Y).'),
            ('nn(fc,[X,Y])::term(X,Y).',
             'nn(fc,[X,Y])::term(X,Y).'),
        ]
        for in_line, out_line in cases:
            parsed = parser.parseString(in_line)[0]
            expected = parser.parseString(out_line)[0]
            net = net.instantiate(parsed)
            self.assertEqual(str(net.term), str(expected))
Exemplo n.º 6
0
    def test_ad_instantiate(self):
        """Instantiating a neural rule with an output domain expands it into
        an annotated disjunction and also produces a test-mode term."""
        net = Network(FC(10, 2), 'fc', None)
        # (input, expected AD expansion, expected test-mode term) triples;
        # each instantiate is chained on the previously returned network.
        cases = [
            ('nn(fc,[X,Y],Z,[0,1])::term(X,Y,Z) :- a(Y).',
             'nn(fc,[X,Y],0)::term(X,Y,0);nn(fc,[X,Y],1)::term(X,Y,1) :- a(Y).',
             'nn(fc,[X,Y],Z)::term(X,Y,Z) :- fc(X,Y,Z), a(Y).'),
            ('nn(fc,[X,Y],Z,[0,1])::term(X,Y,Z).',
             'nn(fc,[X,Y],0)::term(X,Y,0);nn(fc,[X,Y],1)::term(X,Y,1).',
             'nn(fc,[X,Y],Z)::term(X,Y,Z) :- fc(X,Y,Z).'),
        ]
        for in_line, out_line, test_out_line in cases:
            parsed = parser.parseString(in_line)[0]
            expected = parser.parseString(out_line)[0]
            test_expected = parser.parseString(test_out_line)[0]
            net = net.instantiate(parsed)
            self.assertEqual(str(net.term), str(expected))
            self.assertEqual(str(net.test_term), str(test_expected))
Exemplo n.º 7
0
# Test-set size selector used in the data file names below.
test = 8

# NOTE(review): `train` is not defined in this excerpt — presumably a
# training-set size set earlier in the file; confirm before running.
train_queries = load('data/train{}_test{}_train.txt'.format(train, test))
test_queries = load('data/train{}_test{}_test.txt'.format(train, test))


def neural_pred(network, i1, i2):
    """Encode the digit pair (i1, i2) as a 20-dim two-hot vector, feed it
    through `network.net`, and return the un-batched output."""
    encoding = torch.zeros(20)
    # First 10 slots encode i1, the next 10 encode i2.
    for offset, digit in ((0, i1), (10, i2)):
        encoding[int(digit) + offset] = 1.0
    batch = torch.autograd.Variable(encoding.unsqueeze(0))
    return network.net(batch).squeeze(0)


# One fully-connected net drives the swap predicate; Adam with lr=1.0.
fc1 = FC(20, 2)
adam = torch.optim.Adam(fc1.parameters(), lr=1.0)
swap_net = Network(fc1, 'swap_net', neural_pred, optimizer=adam)

#with open('compare.pl') as f:
with open('quicksort.pl') as f:
    problog_string = f.read()

model = Model(problog_string, [swap_net])
optimizer = Optimizer(model, 32)

# NOTE(review): this call is truncated in the excerpt — the remaining
# arguments and the closing parenthesis are missing from this chunk.
train_model(model,
            train_queries,
            20,
            optimizer,
            test_iter=len(train_queries),
Exemplo n.º 8
0

class RNN(nn.Module):
    """Bidirectional LSTM sentence encoder.

    Concatenates the hidden states at the last timestep and at three
    given token positions into one (batch, 4 * 2 * hidden_size) vector.
    """

    def __init__(self, vocab_size, hidden_size):
        super(RNN, self).__init__()
        # Single-layer bidirectional LSTM: 2 * hidden_size output features.
        self.lstm = nn.LSTM(vocab_size, hidden_size, 1, bidirectional=True)

    def forward(self, x, n1, n2, n3):
        x, _ = self.lstm(x)
        x = torch.cat((x[-1, ...], x[n1, ...], x[n2, ...], x[n3, ...]), 1)
        # Bugfix: the original computed `x.view(1, -1)` but discarded the
        # result, so the statement had no effect. For the batch-size-1
        # inputs used by the callers the reshape would be a no-op anyway,
        # so the dead statement is removed rather than assigned.
        return x


# Shared sentence encoder plus four task-specific heads over the 600-dim
# concatenated LSTM features (4 slices * 2 directions * 75 hidden).
# NOTE(review): `vocab` and `FC` are defined elsewhere in the file.
rnn = RNN(len(vocab), 75)
network1 = FC(600, 6)
network2 = FC(600, 4)
network3 = FC(600, 2)
network4 = FC(600, 4)


def np1(net, sentence):
    """Encode `sentence` as a one-hot token tensor, run it through
    `net.net`, and memoize the most recent (sentence, output) pair on
    `net.last` so repeated queries for the same sentence are free."""
    key = str(sentence)
    if net.last[0] == key:  # cache hit: same sentence as last call
        return net.last[1]
    tokens, _numbers, indices = tokenize(key.strip('"'))
    one_hot = torch.zeros(len(tokens), 1, len(vocab))
    for pos, token in enumerate(tokens):
        one_hot[pos, 0, token] = 1.0
    result = net.net(Variable(one_hot), *indices)
    net.last = (key, result)
    return result
Exemplo n.º 9
0
# Read the ProbLog program source from disk.
with open('choose.pl') as f:
    problog_string = f.read()


def neural_pred(network, i1, i2, carry):
    """Build a 30-dim three-hot encoding of (i1, i2, carry) and return
    the network's un-batched output for it."""
    features = torch.zeros(30)
    # Slots 0-9 encode i1, 10-19 encode i2, 20-29 encode the carry.
    for base, value in ((0, i1), (10, i2), (20, carry)):
        features[base + int(value)] = 1.0
    batched = torch.autograd.Variable(features.unsqueeze(0))
    return network.net(batched).squeeze(0)


# Two neural predicates sharing the same featurizer (`neural_pred`) but
# with separate FC nets and separate Adam optimizers (lr=0.05 each).
net1 = FC(30, 25, 10)
network1 = Network(net1, 'neural1', neural_pred)
network1.optimizer = torch.optim.Adam(net1.parameters(), lr=0.05)

net2 = FC(30, 5, 2)
network2 = Network(net2, 'neural2', neural_pred)
network2.optimizer = torch.optim.Adam(net2.parameters(), lr=0.05)

# Train for 40 epochs with batch size 32, caching disabled; evaluate
# accuracy on the held-out queries every 4 passes over the training set.
model = Model(problog_string, [network1, network2], caching=False)
optimizer = Optimizer(model, 32)
logger = train_model(model,
                     train_queries,
                     40,
                     optimizer,
                     test_iter=len(train_queries) * 4,
                     test=lambda x: x.accuracy(test_queries, test=True))