def main():
    """Train a Max-IS RUN-CSP network on a directory of dimacs graphs.

    Parses command-line options, loads and shuffles the training graphs,
    converts them into NAND-constrained CSP instances, batches them, and
    trains a freshly constructed Max_IS_Network.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--state_size', type=int, default=128,
                        help='Size of the variable states in RUN-CSP')
    parser.add_argument('-k', '--kappa', type=float, default=1.0,
                        help='The parameter kappa for the loss function')
    parser.add_argument('-e', '--epochs', type=int, default=25,
                        help='Number of training epochs')
    parser.add_argument('-t', '--t_max', type=int, default=30,
                        help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-b', '--batch_size', type=int, default=10,
                        help='Batch size for training')
    parser.add_argument('-m', '--model_dir', type=str,
                        help='Model directory in which the trained model is stored')
    parser.add_argument('-d', '--data_path',
                        help='A path to a training set of graphs in the dimacs format.')
    opts = parser.parse_args()

    print('loading graphs...')
    names, graphs = data_utils.load_graphs(opts.data_path)
    # shuffle so batches are not biased by on-disk file order
    random.shuffle(graphs)

    print('Converting graphs to CSP Instances')
    instances = [
        CSP_Instance.graph_to_csp_instance(graph, is_language, 'NAND')
        for graph in graphs
    ]

    # combine instances into batches
    train_batches = CSP_Instance.batch_instances(instances, opts.batch_size)

    # construct new network
    network = Max_IS_Network(opts.model_dir, state_size=opts.state_size)
    train(network, train_batches, t_max=opts.t_max, epochs=opts.epochs)
def main():
    """Train a RUN-CSP network for graph coloring on a directory of dimacs graphs.

    Builds an NEQ constraint language for the requested number of colors,
    loads and shuffles the training graphs, converts them into CSP
    instances, batches them, and trains a new RUN_CSP network.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-e', '--epochs', type=int, default=20,
                        help='Number of training epochs')
    parser.add_argument('-t', '--t_max', type=int, default=25,
                        help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-b', '--batch_size', type=int, default=64,
                        help='Batch size for training')
    parser.add_argument('-m', '--model_dir', type=str,
                        help='Model directory in which the trained model is stored')
    parser.add_argument('-d', '--data_path',
                        help='A path to a training set of graphs in the dimacs graph format.')
    parser.add_argument('--n_colors', type=int, default=3,
                        help='Number of colors')
    opts = parser.parse_args()

    # one NEQ relation per edge over a domain of n_colors values
    language = Constraint_Language.get_coloring_language(opts.n_colors)

    print('loading graphs...')
    names, graphs = data_utils.load_graphs(opts.data_path)
    # shuffle so batches are not biased by on-disk file order
    random.shuffle(graphs)

    print('Converting graphs to CSP Instances')
    instances = [
        CSP_Instance.graph_to_csp_instance(graph, language, 'NEQ')
        for graph in tqdm(graphs)
    ]

    # combine instances into batches
    train_batches = CSP_Instance.batch_instances(instances, opts.batch_size)

    # construct and train new network
    network = RUN_CSP(opts.model_dir, language)
    train(network, train_batches, epochs=opts.epochs, t_max=opts.t_max)
def main():
    """Evaluate a trained Max-IS network with boosted (multi-attempt) runs.

    Loads a trained Max_IS_Network, converts the evaluation graphs into
    NAND-constrained CSP instances, and runs boosted evaluation with the
    requested number of attempts per graph.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', '--model_dir', type=str,
                        help='Path to the trained RUN-CSP instance')
    parser.add_argument('-t', '--t_max', type=int, default=100,
                        help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-a', '--attempts', type=int, default=64,
                        help='Attempts for each graph')
    parser.add_argument('-d', '--data_path', default=None,
                        help='Path to the evaluation data. Expects a directory with graphs in dimacs format.')
    args = parser.parse_args()

    network = Max_IS_Network.load(args.model_dir)

    print('loading graphs...')
    names, graphs = data_utils.load_graphs(args.data_path)
    # Fix: graph names were zipped with the graphs but then discarded (`n`
    # was unused), so instances carried no identifier. Pass name=n through,
    # matching the sibling evaluation script's graph_to_csp_instance usage.
    instances = [CSP_Instance.graph_to_csp_instance(g, is_language, 'NAND', name=n)
                 for n, g in zip(names, graphs)]
    evaluate_boosted(network, instances, args.t_max, attempts=args.attempts)
def main():
    """Train a RUN-CSP network on the 2-coloring (NEQ, two colors) language.

    Loads the training graphs, converts them into CSP instances over a
    fixed 2-color NEQ language, batches them, and trains a new RUN_CSP
    network with the requested state size.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--state_size', type=int, default=128,
                        help='Size of the variable states in RUN-CSP')
    parser.add_argument('-b', '--batch_size', type=int, default=10,
                        help='Batch size used during training')
    parser.add_argument('-e', '--epochs', type=int, default=25,
                        help='Number of training epochs')
    parser.add_argument('-m', '--model_dir', type=str,
                        help='The model directory of a trained network')
    parser.add_argument('-t', '--t_max', type=int, default=30,
                        help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-d', '--data_path',
                        help='A path to a training set of graphs in the dimacs graph format')
    opts = parser.parse_args()

    # fixed two-color NEQ language (i.e. max-cut style constraints)
    language = Constraint_Language.get_coloring_language(2)

    print('loading graphs...')
    names, graphs = data_utils.load_graphs(opts.data_path)

    instances = [
        CSP_Instance.graph_to_csp_instance(graph, language, 'NEQ')
        for graph in graphs
    ]
    # combine instances into batches
    train_batches = CSP_Instance.batch_instances(instances, opts.batch_size)

    # construct and train a fresh network
    network = RUN_CSP(opts.model_dir, language=language, state_size=opts.state_size)
    train(network, train_batches, t_max=opts.t_max, epochs=opts.epochs)
def main():
    """Evaluate a trained RUN-CSP network with boosted (multi-attempt) runs.

    Loads a trained RUN_CSP model and evaluates it either on graphs read
    from --data_path (converted to NEQ instances, keeping their names) or,
    when no path is given, on freshly generated random instances.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', '--model_dir', type=str,
                        help='Path to the trained RUN-CSP instance')
    parser.add_argument('-t', '--t_max', type=int, default=100,
                        help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-a', '--attempts', type=int, default=64,
                        help='Attempts for each graph')
    parser.add_argument('-d', '--data_path', default=None,
                        help='Path to the evaluation data. Expects a directory with graphs in dimacs format.')
    parser.add_argument('-v', '--n_variables', type=int, default=400,
                        help='Number of variables in each training instance. Only used when --data_path is not specified.')
    parser.add_argument('-c', '--n_clauses', type=int, default=1000,
                        help='Number of clauses in each training instance. Only used when --data_path is not specified.')
    parser.add_argument('-i', '--n_instances', type=int, default=100,
                        help='Number of instances for training. Only used when --data_path is not specified.')
    opts = parser.parse_args()

    network = RUN_CSP.load(opts.model_dir)
    # evaluate with the same constraint language the network was trained on
    language = network.language

    if opts.data_path is None:
        print(f'Generating {opts.n_instances} training instances')
        instances = [
            CSP_Instance.generate_random(opts.n_variables, opts.n_clauses, language)
            for _ in tqdm(range(opts.n_instances))
        ]
    else:
        print('loading graphs...')
        names, graphs = data_utils.load_graphs(opts.data_path)
        instances = [
            CSP_Instance.graph_to_csp_instance(graph, language, 'NEQ', name=graph_name)
            for graph_name, graph in zip(names, graphs)
        ]

    conflicting_edges = evaluate_boosted(network, instances, opts.t_max,
                                         attempts=opts.attempts)