def as_sat_solver():
    # One unconstrained parameter per boolean variable; truthy() maps each
    # into (0, 1) so it can act as a fuzzy truth value.
    p = {
        'x': torch.nn.Parameter(torch.tensor([0.0])),
        'y': torch.nn.Parameter(torch.tensor([0.0])),
        'z': torch.nn.Parameter(torch.tensor([0.0])),
    }

    def generate_clauses():
        # x and (x -> y) and (y -> z), with each implication written as neg(a) or b.
        return [
            truthy(p['x']),
            or_(neg_(truthy(p['x'])), truthy(p['y'])),
            or_(neg_(truthy(p['y'])), truthy(p['z'])),
        ]

    sat_solver = SatSolver(parameters=p, generate_clauses=generate_clauses)
    optimizer = torch.optim.Adam(sat_solver.parameters(), lr=1.0)

    for i in range(10):
        optimizer.zero_grad()
        sat_val = sat_solver()
        # Maximize satisfaction by minimizing its negation.
        loss = neg_(sat_val)
        print(f"Sat={sat_val.item()}, loss={loss.item()}")
        loss.backward()
        optimizer.step()
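
# The SatSolver class itself is not shown above. A minimal sketch of what it
# might look like, assuming it is an nn.Module that registers the variables
# (so optimizer.parameters() finds them) and whose forward() combines all
# clause values with and_ — an assumed reconstruction, not the actual class:
import functools

import torch

from loss import and_


class SatSolver(torch.nn.Module):
    def __init__(self, parameters, generate_clauses):
        super().__init__()
        # Register the raw variables so they show up in .parameters().
        self.vars = torch.nn.ParameterDict(parameters)
        self.generate_clauses = generate_clauses

    def forward(self):
        # Overall satisfaction is the fuzzy conjunction of all clauses.
        return functools.reduce(and_, self.generate_clauses())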

def get_clause():
    # truthy just adds a sigmoid to avoid clamping, but slows down training
    return {
        'neg(x)_or_y': or_(neg_(truthy(x)), truthy(y)),  # x -> y
        'neg(y)_or_z': or_(neg_(truthy(y)), truthy(z)),  # y -> z
    }
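
# Per the comment above, truthy likely squashes an unconstrained parameter
# into (0, 1) with a sigmoid. A plausible one-liner (assumed; the actual
# loss.py implementation is not shown here):
import torch


def truthy(v: torch.Tensor) -> torch.Tensor:
    # Map any real value into (0, 1) so it behaves as a fuzzy truth value.
    return torch.sigmoid(v)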

def get_clause():
    return {
        'x': x,
        'neg(x)_or_y': or_(neg_(x), y),  # x -> y
        'neg(y)_or_z': or_(neg_(y), z),  # y -> z
    }
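
# Without truthy, the raw variables must themselves stay inside [0, 1] to be
# valid fuzzy truth values. One way to enforce that (a sketch, assuming x, y,
# and z are plain torch.nn.Parameter values as in the snippets above) is to
# clamp them in place after each optimizer.step():
import torch

with torch.no_grad():
    for v in (x, y, z):
        v.clamp_(0.0, 1.0)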

def generate_clauses():
    return [
        truthy(p['x']),
        or_(neg_(truthy(p['x'])), truthy(p['y'])),
        or_(neg_(truthy(p['y'])), truthy(p['z'])),
    ]

import torch

from loss import and_, or_, neg_, clause_loss, truthy

zero = torch.tensor([0])
one = torch.tensor([1])

assert and_(zero, zero).item() == 0
assert and_(zero, one).item() == 0
assert and_(one, zero).item() == 0
assert and_(one, one).item() == 1

assert or_(zero, zero).item() == 0
assert or_(zero, one).item() == 1
assert or_(one, zero).item() == 1
assert or_(one, one).item() == 1

assert neg_(zero).item() == 1
assert neg_(one).item() == 0


def simple_optim():
    """
    Try to SAT:
        x
        and neg(x) or y
        and neg(y) or z
    should all be 1.

    neg(x) or y is equivalent to x -> y. Thus the clauses are:
        x and (x -> y) and (y -> z)
    """
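
# The operator definitions in loss.py are not shown here. One common
# differentiable choice that satisfies every assertion above is product
# fuzzy logic (an assumed reconstruction, not necessarily the actual code):
import torch


def and_(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
    # Product t-norm: agrees with boolean AND at 0/1, differentiable between.
    return a * b


def or_(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
    # Probabilistic sum: a + b - a*b agrees with boolean OR at 0/1.
    return a + b - a * b


def neg_(a: torch.Tensor) -> torch.Tensor:
    # Standard fuzzy negation.
    return 1 - a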