Beispiel #1
0
def load_model(args, log_file=None):
    """Construct a NeuroSAT network on the GPU, optionally restoring weights.

    args: parsed options; ``args.restore`` may hold a checkpoint path.
    log_file: optional open file object to which progress is logged.
    Returns the CUDA-resident (and possibly restored) network.
    """
    net = NeuroSAT(args).cuda()
    if args.restore:
        # Announce the restore in the log before touching the checkpoint.
        if log_file is not None:
            print('restoring from', args.restore, file=log_file, flush=True)
        checkpoint = torch.load(args.restore)
        net.load_state_dict(checkpoint['state_dict'])
    return net
Beispiel #2
0
    def __init__(self, cfg, gpu_id, gpu_frac):
        """Build a TF session, graph input placeholders, the NeuroSAT model,
        and per-variable assign ops for pushing weights into the graph.

        cfg: model configuration forwarded to NeuroSAT.
        gpu_id: GPU to pin via CUDA_VISIBLE_DEVICES, or None to leave as-is.
        gpu_frac: fraction of GPU memory this process may allocate.
        """
        if gpu_id is not None:
            os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id)

        self.queue = queue.Queue()

        # Cap per-process GPU memory so multiple workers can share one card.
        session_config = tf.ConfigProto()
        session_config.gpu_options.per_process_gpu_memory_fraction = gpu_frac
        self.sess = tf.Session(config=session_config)

        # Scalar problem sizes plus the [None, 2] literal/clause index pairs.
        self.n_vars = tf.placeholder(
            dtype=tf.int32, shape=[], name="Placeholder_n_vars")
        self.n_clauses = tf.placeholder(
            dtype=tf.int32, shape=[], name="Placeholder_n_clauses")
        self.LC_idxs = tf.placeholder(
            dtype=tf.int32, shape=[None, 2], name="Placeholder_LC_idxs")

        self.neurosat = NeuroSAT(
            cfg,
            NeuroSATArgs(n_vars=self.n_vars,
                         n_clauses=self.n_clauses,
                         LC_idxs=self.LC_idxs))

        trainable = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)

        # One feed placeholder per trainable variable (keyed by variable name)
        # and a matching assign op, so external weights can be injected later.
        self.assign_placeholders = {}
        for var in trainable:
            self.assign_placeholders[var.name] = tf.placeholder(
                var.value().dtype,
                var.get_shape().as_list(),
                name="Placeholder_%s" % var.name.split(":")[0])
        self.assign_ops = [
            var.assign(self.assign_placeholders[var.name])
            for var in trainable
        ]
Beispiel #3
0
import argparse  # was missing: argparse.ArgumentParser is used below
import os

from options import add_neurosat_options
from neurosat import NeuroSAT

# Silence TensorFlow's C++ logging before any graph construction.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

parser = argparse.ArgumentParser()
add_neurosat_options(parser)

parser.add_argument('test_dir',
                    action='store',
                    type=str,
                    help='Directory with directories of test data')
parser.add_argument('restore_id', action='store', type=int)
parser.add_argument('restore_epoch', action='store', type=int)
parser.add_argument('n_rounds', action='store', type=int)

opts = parser.parse_args()
# Evaluation never trains, so no run id is needed and one snapshot suffices.
setattr(opts, 'run_id', None)
setattr(opts, 'n_saves_to_keep', 1)

print(opts)

g = NeuroSAT(opts)
g.restore()

# Score every test file, reporting the cost and the confusion matrix
# (ff, ft, tf, tt) for each.
results = g.test(opts.test_dir)
for (test_filename, etest_cost, etest_mat) in results:
    print("%s %.4f (%.2f, %.2f, %.2f, %.2f)" %
          (test_filename, etest_cost, etest_mat.ff, etest_mat.ft, etest_mat.tf,
           etest_mat.tt))
import argparse  # was missing: argparse.ArgumentParser is used below
import os        # was missing in this fragment: os.environ / os.listdir are used
import pickle    # was missing: pickle.load is used below

from options import add_neurosat_options
from neurosat import NeuroSAT

# Silence TensorFlow's C++ logging before any graph construction.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

parser = argparse.ArgumentParser()
add_neurosat_options(parser)

parser.add_argument('solve_dir', action='store', type=str)
parser.add_argument('restore_id', action='store', type=int)
parser.add_argument('restore_epoch', action='store', type=int)
parser.add_argument('n_rounds', action='store', type=int)

opts = parser.parse_args()
# Solving never trains, so no run id is needed and one snapshot suffices.
setattr(opts, 'run_id', None)
setattr(opts, 'n_saves_to_keep', 1)

print(opts)

g = NeuroSAT(opts)
g.restore()

# Each file under solve_dir holds a pickled list of problems; try to decode
# a satisfying assignment for every problem in every file.
filenames = [os.path.join(opts.solve_dir, f) for f in os.listdir(opts.solve_dir)]
for filename in filenames:
    # NOTE: pickle.load executes arbitrary code; only load trusted files.
    with open(filename, 'rb') as f:
        problems = pickle.load(f)

    for problem in problems:
        solutions = g.find_solutions(problem)
        for batch, solution in enumerate(solutions):
            print("[%s] %s" % (problem.dimacs[batch], str(solution)))
Beispiel #5
0
import os
import pickle  # was missing: pickle.load is used below
from tqdm import tqdm

import torch
import torch.nn as nn
import torch.optim as optim

from neurosat import NeuroSAT
from data_maker import generate
import mk_problem

from config import parser

args = parser.parse_args()

net = NeuroSAT(args)
net = net.cuda()

# Encode the key hyperparameters in the task name so the log files are
# self-describing (SR range, epochs, message-passing rounds, embedding dim).
task_name = (args.task_name + '_sr' + str(args.min_n) + 'to' + str(args.max_n)
             + '_ep' + str(args.epochs) + '_nr' + str(args.n_rounds)
             + '_d' + str(args.dim))
log_file = open(os.path.join(args.log_dir, task_name + '.log'), 'a+')
detail_log_file = open(os.path.join(args.log_dir, task_name + '_detail.log'),
                       'a+')

train, val = None, None
if args.train_file is not None:
    # NOTE: pickle.load executes arbitrary code; only load trusted data files.
    with open(os.path.join(args.data_dir, 'train', args.train_file),
              'rb') as f:
        train = pickle.load(f)
Beispiel #6
0
                    default=100000,
                    help='Number of epochs through data')
parser.add_argument('--n_saves_to_keep',
                    action='store',
                    dest='n_saves_to_keep',
                    type=int,
                    default=4,
                    help='Number of saved models to keep')

opts = parser.parse_args()

# Record provenance: the exact commit and host this run was launched from.
setattr(opts, 'commit',
        subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip())
setattr(opts, 'hostname', subprocess.check_output(['hostname']).strip())

# Fresh runs get a random id so their snapshots do not collide.
if opts.run_id is None:
    opts.run_id = random.randrange(sys.maxsize)

print(opts)

# exist_ok avoids the check-then-create race of os.path.exists + os.mkdir.
os.makedirs("snapshots", exist_ok=True)

g = NeuroSAT(opts)

# Train epoch by epoch, printing the cost, the confusion matrix
# (ff, ft, tf, tt), and the wall time per epoch.
for epoch in range(opts.n_epochs):
    (efilename, etrain_cost, etrain_mat, lr, etime) = g.train_epoch(epoch)
    print("[%d] %.4f (%.2f, %.2f, %.2f, %.2f) [%ds]" %
          (epoch, etrain_cost, etrain_mat.ff, etrain_mat.ft, etrain_mat.tf,
           etrain_mat.tt, etime))
Beispiel #7
0
    def __init__(self, cfg, replay_buffer, outqueue):
        """Build the learner: a TF session, an input pipeline fed from the
        replay buffer, the NeuroSAT model, its losses, the training op, and
        a saver (optionally restoring from ``cfg['restore_path']``).

        cfg: dict of hyperparameters ('seed', 'prefetch', cost scales,
            'max_saves_to_keep', 'restore_path', ...).
        replay_buffer: source of training datapoints via sample_datapoints().
        outqueue: queue handle stored for later use by other methods.
        """
        self.cfg = cfg
        self.replay_buffer = replay_buffer
        self.outqueue = outqueue

        config = tf.ConfigProto()
        self.sess = tf.Session(config=config)
        # Seed both TF and NumPy before any graph construction for
        # reproducibility.
        tf.set_random_seed(cfg['seed'])
        np.random.seed(cfg['seed'])

        def get_next_datapoint():
            # Infinite generator: blocks (polling every 20s) until the replay
            # buffer has data, then yields one datapoint's tensors at a time.
            while True:
                datapoints = self.replay_buffer.sample_datapoints(n_samples=1)
                if not datapoints:
                    print("[LEARNER:GENERATOR] going to sleep...")
                    time.sleep(20)
                    print("[LEARNER:GENERATOR] waking up...")
                else:
                    assert (len(datapoints) == 1)
                    dp = datapoints[0]
                    yield dp.n_vars, dp.n_clauses, dp.LC_idxs, dp.target_var, dp.target_sl_esteps

        # Wrap the generator in a tf.data pipeline: two scalar int32 sizes,
        # the [None, 2] literal/clause index pairs, the int32 policy target,
        # and the float32 value target.
        dataset = tf.data.Dataset.from_generator(
            get_next_datapoint,
            (tf.int32, tf.int32, tf.int32, tf.int32, tf.float32),
            (tf.TensorShape([]), tf.TensorShape([]), tf.TensorShape(
                [None, 2]), tf.TensorShape([]), tf.TensorShape([])))

        # Prefetch so buffer sampling overlaps with training steps.
        dataset = dataset.prefetch(cfg['prefetch'])
        (n_vars, n_clauses, LC_idxs, target_var,
         target_sl_esteps) = dataset.make_one_shot_iterator().get_next()

        self.neurosat = NeuroSAT(
            cfg,
            NeuroSATArgs(n_vars=n_vars, n_clauses=n_clauses, LC_idxs=LC_idxs))
        # Policy head: cross-entropy of the predicted variable vs the target.
        self.p_cost = cfg[
            'p_cost_scale'] * tf.nn.sparse_softmax_cross_entropy_with_logits(
                logits=self.neurosat.logits, labels=target_var)
        # Value head: squared error on the predicted solve-step estimate.
        self.v_cost = cfg['v_cost_scale'] * tf.square(self.neurosat.sl_esteps -
                                                      target_sl_esteps)
        self.l2_cost = cfg['l2_cost_scale'] * build_l2_cost()
        self.loss = self.p_cost + self.v_cost + self.l2_cost
        self.global_step = tf.get_variable("global_step",
                                           shape=[],
                                           initializer=tf.zeros_initializer(),
                                           trainable=False)
        self.learning_rate = build_learning_rate(cfg, self.global_step)
        self.apply_gradients = build_apply_gradients(cfg, self.loss,
                                                     self.learning_rate,
                                                     self.global_step)

        self.target_sl_esteps = target_sl_esteps
        self.declare_summaries()

        # Initialize variables before creating the saver / restoring, so a
        # restore overwrites freshly-initialized values.
        tf.global_variables_initializer().run(session=self.sess)
        self.saver = tf.train.Saver(max_to_keep=cfg['max_saves_to_keep'])

        if cfg['restore_path'] != "none":
            print("Restoring from %s..." % cfg['restore_path'])
            self.saver.restore(self.sess, cfg['restore_path'])

        self.tvars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
        self.weights = self._extract_weights()
Beispiel #8
0
# Checkpoint selection and run parameters for the solver script.
parser.add_argument('restore_id', action='store', type=int)
parser.add_argument('restore_epoch', action='store', type=int)
parser.add_argument('n_rounds', action='store', type=int)
parser.add_argument('dir', action='store', type=str)
opts = parser.parse_args()
setattr(opts, 'run_id', None)
setattr(opts, 'n_saves_to_keep', 1)
# NOTE(review): these two overwrite the 'n_rounds' and 'dir' values parsed
# from the command line above — presumably deliberate hard-coding, but
# confirm before relying on the CLI arguments.
setattr(opts, 'n_rounds', 26)
setattr(opts, 'dir', None)

def distance(a, b):
    """Return the squared Euclidean distance between sequences a and b.

    Iterates over the indices of ``a``; ``b`` must be at least as long
    (an IndexError is raised otherwise, matching the original behavior).
    """
    # sum over a generator replaces the manual accumulator loop.
    return sum((a[i] - b[i]) ** 2 for i in range(len(a)))
# Build the model and load the checkpoint selected by the parsed options.
g = NeuroSAT(opts)
g.restore()
# NOTE(review): shadows the builtin 'dir' and takes the last CLI argument as
# the input directory, ignoring the 'dir' option parsed above — confirm.
dir = sys.argv[-1]
if not os.path.exists("./temp/real_pack/"):
	os.mkdir("./temp/real_pack/")
# Pack the problems found under 'dir' into pickle files in temp/real_pack.
fun_generate(1,dir, "./temp/real_pack/")
# This is a directory of data-format packages.
opts.solve_dir = "./temp/real_pack/"
filenames = [opts.solve_dir + "/" + f for f in os.listdir(opts.solve_dir)]
num = 0
for filename in filenames:
	with open(filename, 'rb') as f:
		problems = pickle.load(f)
	for problem in problems:
		num += 1
		s = time.time()
Beispiel #9
0
import os
import argparse
from options import add_neurosat_options
from neurosat import NeuroSAT

# Command line: the standard NeuroSAT options plus validation-specific ones.
parser = argparse.ArgumentParser()
add_neurosat_options(parser)

parser.add_argument('valid_dir',
                    action='store',
                    type=str,
                    help='Directory with directories of validation data')
parser.add_argument('restore_id', action='store', type=int)
parser.add_argument('restore_epoch', action='store', type=int)
parser.add_argument('n_rounds', action='store', type=int)

opts = parser.parse_args()
# Validation never saves new snapshots: no run id, a single kept save.
setattr(opts, 'run_id', None)
setattr(opts, 'n_saves_to_keep', 1)

print(opts)

# Restore the requested checkpoint and score every validation file,
# reporting the cost and the (ff, ft, tf, tt) confusion matrix.
g = NeuroSAT(opts)
g.restore()

for name, cost, mat in g.validate(opts.valid_dir):
    print("%s %.4f (%.2f, %.2f, %.2f, %.2f)" %
          (name, cost, mat.ff, mat.ft, mat.tf, mat.tt))