Example #1
import torch

# or_, neg_ and truthy are the differentiable logic ops from the loss module (see Example #5).
# SatSolver's import path is not shown in these examples.
from loss import or_, neg_, truthy


def as_sat_solver():
    p = {
        'x': torch.nn.Parameter(torch.tensor([0.0])),
        'y': torch.nn.Parameter(torch.tensor([0.0])),
        'z': torch.nn.Parameter(torch.tensor([0.0]))
    }

    def generate_clauses():
        # Encodes x AND (x -> y) AND (y -> z); each implication is written as (not a) or b.
        return [
            truthy(p['x']),
            or_(neg_(truthy(p['x'])), truthy(p['y'])),
            or_(neg_(truthy(p['y'])), truthy(p['z']))
        ]

    sat_solver = SatSolver(parameters=p, generate_clauses=generate_clauses)
    optimizer = torch.optim.Adam(sat_solver.parameters(), lr=1.0)
    for i in range(10):
        optimizer.zero_grad()
        sat_val = sat_solver()
        # Minimizing the negation of the satisfaction value pushes it towards 1.
        loss = neg_(sat_val)
        print(f"Sat={sat_val.item()}, loss={loss.item()}")
        loss.backward()
        optimizer.step()
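
SatSolver's internals are not shown in any of these examples. Below is a minimal sketch of a module with the same interface, assuming the generated clauses are simply reduced with the differentiable and_ from the loss module; the name MiniSatSolver and everything else in the sketch are illustrative assumptions, not the project's actual code.

import torch

from loss import and_  # differentiable AND (see the truth-table checks in Example #5)


class MiniSatSolver(torch.nn.Module):
    """Reduces all generated clauses with a differentiable AND; 1.0 means fully satisfied."""

    def __init__(self, parameters, generate_clauses):
        super().__init__()
        # Register the variables so the optimizer can find them via .parameters().
        self.vars = torch.nn.ParameterDict(parameters)
        self.generate_clauses = generate_clauses

    def forward(self):
        clauses = self.generate_clauses()
        sat = clauses[0]
        for clause in clauses[1:]:
            sat = and_(sat, clause)
        return sat

With a stand-in like this, the training loop above drives Sat towards 1 by minimizing neg_(sat_val).
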
Example #2
def get_clause():
    return {
        # truthy just applies a sigmoid: it avoids clamping but slows down training
        'neg(x)_or_y': or_(neg_(truthy(x)), truthy(y)),  # x -> y
        'neg(y)_or_z': or_(neg_(truthy(y)), truthy(z))   # y -> z
    }
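
The comment above says truthy just applies a sigmoid. Definitions consistent with that comment and with the truth tables asserted in Example #5 could look like the sketch below; these are assumptions about the loss module, not its actual source.

import torch


def truthy(a):
    # Map an unconstrained parameter into (0, 1) instead of clamping it.
    return torch.sigmoid(a)


def neg_(a):
    return 1 - a             # NOT: neg_(0) = 1, neg_(1) = 0


def and_(a, b):
    return a * b             # product t-norm: 1 only when both operands are 1


def or_(a, b):
    return a + b - a * b     # probabilistic sum: 0 only when both operands are 0

The sigmoid keeps the raw variables unconstrained during optimization, presumably because clamping to [0, 1] would zero the gradient at the boundaries.
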
Example #3
def get_clause():
    return {
        'x': x,
        'neg(x)_or_y': or_(neg_(x), y),  # x -> y
        'neg(y)_or_z': or_(neg_(y), z)   # y -> z
    }
Example #4
def generate_clauses():
    return [
        truthy(p['x']),
        or_(neg_(truthy(p['x'])), truthy(p['y'])),
        or_(neg_(truthy(p['y'])), truthy(p['z']))
    ]
Example #5
import torch

from loss import and_, or_, neg_, clause_loss, truthy

zero = torch.tensor([0])
one = torch.tensor([1])
assert and_(zero, zero).item() == 0
assert and_(zero, one).item() == 0
assert and_(one, zero).item() == 0
assert and_(one, one).item() == 1

assert or_(zero, zero).item() == 0
assert or_(zero, one).item() == 1
assert or_(one, zero).item() == 1
assert or_(one, one).item() == 1

assert neg_(zero).item() == 1
assert neg_(one).item() == 0


def simple_optim():
    """
    Try to satisfy (SAT):
    x -- and
    neg(x) or y -- and
    neg(y) or z

    All three clauses should evaluate to 1.

    neg(x) or y is equivalent to x -> y.
    Thus the clauses are: x and (x -> y) and (y -> z)
    """