Example #1
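Training entry point for graph coloring: it parses the training hyperparameters, loads a set of graphs in DIMACS format, converts them to CSP instances over the coloring language with NEQ constraints, batches them, and trains a new RUN-CSP network.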
import argparse
import random

from tqdm import tqdm

# RUN_CSP, Constraint_Language, CSP_Instance, data_utils and the train helper
# are provided by the RUN-CSP code base; their import lines are omitted here.


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-e', '--epochs', type=int, default=20, help='Number of training epochs')
    parser.add_argument('-t', '--t_max', type=int, default=25, help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-b', '--batch_size', type=int, default=64, help='Batch size for training')
    parser.add_argument('-m', '--model_dir', type=str, help='Model directory in which the trained model is stored')
    parser.add_argument('-d', '--data_path', help='A path to a training set of graphs in the dimacs graph format')
    parser.add_argument('--n_colors', type=int, default=3, help='Number of colors')
    args = parser.parse_args()

    language = Constraint_Language.get_coloring_language(args.n_colors)

    print('loading graphs...')
    names, graphs = data_utils.load_graphs(args.data_path)
    random.shuffle(graphs)
    print('Converting graphs to CSP Instances')
    instances = [
        CSP_Instance.graph_to_csp_instance(g, language, 'NEQ')
        for g in tqdm(graphs)
    ]

    # combine instances into batches
    train_batches = CSP_Instance.batch_instances(instances, args.batch_size)

    # construct and train new network
    network = RUN_CSP(args.model_dir, language)
    train(network, train_batches, epochs=args.epochs, t_max=args.t_max)
Example #2
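Loader that restores a trained RUN-CSP network from its model directory, using the stored parameters.json and the saved constraint language.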
    @staticmethod
    def load(model_dir):
        """
        Loads a trained RUN-CSP network from its model directory.
        :param model_dir: The model directory to load from
        :return: The loaded RUN-CSP network
        """
        with open(os.path.join(model_dir, "parameters.json"), 'r') as f:
            parameters = json.load(f)

        state_size = parameters['state_size']
        language = Constraint_Language.load(os.path.join(model_dir, 'language.json'))

        network = RUN_CSP(model_dir, language, state_size)
        return network
Example #3
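Evaluation entry point: it loads a trained RUN-CSP network, converts a directory of DIMACS graphs into CSP instances over the 2-coloring language, and evaluates the network with several attempts per graph via evaluate_boosted.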
import argparse

# RUN_CSP, Constraint_Language, CSP_Instance, data_utils and evaluate_boosted
# are provided by the RUN-CSP code base; their import lines are omitted here.


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', '--model_dir', type=str, help='Path to the trained RUN-CSP instance')
    parser.add_argument('-t', '--t_max', type=int, default=100, help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-a', '--attempts', type=int, default=64, help='Attempts for each graph')
    parser.add_argument('-d', '--data_path', default=None, help='Path to the evaluation data. Expects a directory with graphs in dimacs format.')
    args = parser.parse_args()

    network = RUN_CSP.load(args.model_dir)
    language = Constraint_Language.get_coloring_language(2)

    print('loading graphs...')
    names, graphs = data_utils.load_graphs(args.data_path)
    instances = [CSP_Instance.graph_to_csp_instance(g, language, 'NEQ', name=n) for n, g in zip(names, graphs)]
    
    conflicting_edges = evaluate_boosted(network, instances, args.t_max, attempts=args.attempts)
Example #4
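Training entry point for the 2-coloring language: it loads DIMACS graphs, converts them to CSP instances with NEQ constraints, batches them, and trains a new RUN-CSP network.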
import argparse

# RUN_CSP, Constraint_Language, CSP_Instance, data_utils and the train helper
# are provided by the RUN-CSP code base; their import lines are omitted here.


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--state_size', type=int, default=128, help='Size of the variable states in RUN-CSP')
    parser.add_argument('-b', '--batch_size', type=int, default=10, help='Batch size used during training')
    parser.add_argument('-e', '--epochs', type=int, default=25, help='Number of training epochs')
    parser.add_argument('-m', '--model_dir', type=str, help='The model directory of a trained network')
    parser.add_argument('-t', '--t_max', type=int, default=30, help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-d', '--data_path', help='A path to a training set of graphs in the dimacs graph format')
    args = parser.parse_args()

    language = Constraint_Language.get_coloring_language(2)

    print('loading graphs...')
    names, graphs = data_utils.load_graphs(args.data_path)
    instances = [CSP_Instance.graph_to_csp_instance(g, language, 'NEQ') for g in graphs]

    train_batches = CSP_Instance.batch_instances(instances, args.batch_size)
    network = RUN_CSP(args.model_dir, language=language, state_size=args.state_size)
    train(network, train_batches, t_max=args.t_max, epochs=args.epochs)
Example #5
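Constructor of a coloring-specific network: it builds a RUN_CSP instance whose constraint language is the k-coloring language for the given number of colors.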
    def __init__(self, model_dir, colors=3, state_size=128):
        super().__init__(model_dir, Constraint_Language.get_coloring_language(colors), state_size=state_size)
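On its own, this constructor is only a method fragment; it runs inside a subclass of RUN_CSP. The sketch below shows one way it could be embedded and instantiated; the class name Coloring_Network and the model directory path are hypothetical and not part of the original code.

class Coloring_Network(RUN_CSP):
    def __init__(self, model_dir, colors=3, state_size=128):
        # Specialize RUN_CSP to the k-coloring language (NEQ constraints over 'colors' values)
        super().__init__(model_dir, Constraint_Language.get_coloring_language(colors), state_size=state_size)

# Usage sketch: a network for 3-coloring with 128-dimensional variable states
network = Coloring_Network('models/coloring_3', colors=3, state_size=128)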
Example #6
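Training entry point for an arbitrary constraint language: it loads the language from a JSON config file, generates random training instances, batches them, and trains a new RUN-CSP network.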
import argparse

import numpy as np
from tqdm import tqdm

# RUN_CSP, Constraint_Language, CSP_Instance and the train helper are provided
# by the RUN-CSP code base; their import lines are omitted here.


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-l', '--language_config_path', type=str, help='The path to a json file that specifies the constraint language')
    parser.add_argument('-m', '--model_dir', type=str, help='Path to the model directory where the trained RUN-CSP instance will be stored')
    parser.add_argument('-v', '--n_variables', type=int, default=100, help='Number of variables in each training instance')
    parser.add_argument('--c_min', type=int, default=100, help='Minimum number of clauses in each training instance')
    parser.add_argument('--c_max', type=int, default=600, help='Maximum number of clauses in each training instance')
    parser.add_argument('-i', '--n_instances', type=int, default=4000, help='Number of instances for training')
    parser.add_argument('-t', '--t_max', type=int, default=30, help='Number of iterations t_max for which RUN-CSP runs on each instance')
    parser.add_argument('-s', '--state_size', type=int, default=128, help='Size of the variable states in RUN-CSP')
    parser.add_argument('-b', '--batch_size', type=int, default=10, help='Batch size used during training')
    parser.add_argument('-e', '--epochs', type=int, default=25, help='Number of training epochs')
    args = parser.parse_args()

    print(f'Loading constraint language from {args.language_config_path}')
    language = Constraint_Language.load(args.language_config_path)
    # create RUN_CSP instance for given constraint language
    network = RUN_CSP(args.model_dir, language, args.state_size)

    print(f'Generating {args.n_instances} training instances')
    train_instances = [
        CSP_Instance.generate_random(args.n_variables,
                                     np.random.randint(args.c_min, args.c_max),
                                     language)
        for _ in tqdm(range(args.n_instances))
    ]
    # combine instances into batches
    train_batches = CSP_Instance.batch_instances(train_instances,
                                                 args.batch_size)

    # train and store the network
    train(network, train_batches, args.t_max, args.epochs)