Example No. 1
def run_devol():
    X_train, y_train, X_test = load_dog_breed()
    genome_handler = GenomeHandler(
        max_conv_layers=6,
        max_dense_layers=2,  # includes final dense layer
        max_filters=256,
        max_dense_nodes=1024,
        input_shape=X_train.shape[1:],
        n_classes=120)
    devol = DEvol(genome_handler)
    # DEvol needs labelled validation data to score each genome, so hold
    # out part of the training set (load_dog_breed's test split has no labels).
    split = int(len(X_train) * 0.9)
    dataset = ((X_train[:split], y_train[:split]),
               (X_train[split:], y_train[split:]))
    model = devol.run(dataset=dataset,
                      num_generations=20,
                      pop_size=20,
                      epochs=5)
    print(model.summary())
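Since `DEvol.run` returns an ordinary trained Keras model, lines like the following could be appended to the end of `run_devol` to persist the winner and score the unlabelled test images. This is only a minimal sketch relying on the standard Keras `save`/`predict` API; the file name is illustrative:

    model.save('dog_breed_best.h5')      # architecture + weights of the evolved model
    predictions = model.predict(X_test)  # class probabilities, shape (len(X_test), 120)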
Example No. 2
def evolveNetwork(trainCandles, testCandles):

    dataset = ((trainCandles['input'], trainCandles['target']),
               (testCandles['input'], testCandles['target']))

    genome_handler = GenomeHandler(max_conv_layers=2, 
                                   max_dense_layers=3, # includes final dense layer
                                   max_filters=128,
                                   max_dense_nodes=1024,
                                   input_shape=(6, 5, 1),
                                   n_classes=trainCandles['target'].shape[-1])

    devol = DEvol(genome_handler)
    model = devol.run(
        dataset=dataset,
        num_generations=20,
        pop_size=20,
        epochs=5)

    print(model.summary())
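`evolveNetwork` expects dictionaries whose 'input' arrays match input_shape=(6, 5, 1) and whose 'target' arrays are one-hot encoded. The call below is a hypothetical usage sketch with randomly generated placeholder data, purely to illustrate the expected shapes; the sample counts and class count are made up:

import numpy as np

n_train, n_test, n_classes = 1000, 200, 3   # illustrative sizes only
trainCandles = {'input':  np.random.rand(n_train, 6, 5, 1).astype('float32'),
                'target': np.eye(n_classes)[np.random.randint(n_classes, size=n_train)]}
testCandles = {'input':  np.random.rand(n_test, 6, 5, 1).astype('float32'),
               'target': np.eye(n_classes)[np.random.randint(n_classes, size=n_test)]}
evolveNetwork(trainCandles, testCandles)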
Example No. 3
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
dataset = ((x_train, y_train), (x_test, y_test))

# **Prepare the genome configuration**
# The `GenomeHandler` class handles the constraints that are imposed upon
# models in a particular genetic program. See `genome_handler.py`
# for more information.

max_conv_layers = 6
max_dense_layers = 2  # including final softmax layer
max_conv_kernels = 256
max_dense_nodes = 1024
input_shape = x_train.shape[1:]
num_classes = 10

genome_handler = GenomeHandler(max_conv_layers, max_dense_layers,
                               max_conv_kernels, max_dense_nodes,
                               input_shape, num_classes)

# **Create and run the genetic program**
# The next, and final, step is to create a `DEvol` instance and run it.
# Here we specify a few settings pertaining to the genetic program. The
# program will save each genome's encoding, as well as the model's loss
# and accuracy, in a `.csv` file whose path is printed at the beginning
# of the program.

num_generations = 10
population_size = 2
num_epochs = 1

devol = DEvol(genome_handler)
devol.run(dataset, num_generations, population_size, num_epochs)
Example No. 4
X_train, y_train, X_validation, y_validation, _ = load_process_data(
    'train.csv', 'validate.csv', 'test.csv')
y_train = to_categorical(y_train)
y_validation = to_categorical(y_validation)
dataset = ((X_train, y_train), (X_validation, y_validation))

# **Prepare the genome configuration**
# The `GenomeHandler` class handles the constraints that are imposed upon
# models in a particular genetic program. See `genome_handler.py`
# for more information.

genome_handler = GenomeHandler(
    max_conv_layers=0,
    max_dense_layers=9,  # includes final dense layer
    max_filters=256,
    max_dense_nodes=2048,
    input_shape=X_train.shape[1:],
    n_classes=2)

# **Create and run the genetic program**
# The next, and final, step is to create a `DEvol` instance and run it.
# Here we specify a few settings pertaining to the genetic program. The
# program will save each genome's encoding, as well as the model's loss
# and accuracy, in a `.csv` file whose path is printed at the beginning
# of the program. The best model is returned decoded and already trained
# for `epochs` epochs.

devol = DEvol(genome_handler)
model = devol.run(dataset=dataset, num_generations=8, pop_size=10, epochs=200)
print(model.summary())
Example No. 5
dataset = ((x_train, y_train), (x_test, y_test))

# **Prepare the genome configuration**
# The `GenomeHandler` class handles the constraints that are imposed upon
# models in a particular genetic program. See `genome_handler.py`
# for more information.

max_conv_layers = 6
max_dense_layers = 2  # including final softmax layer
max_conv_kernels = 256
max_dense_nodes = 1024
input_shape = x_train.shape[1:]
num_classes = 10

genome_handler = GenomeHandler(max_conv_layers, max_dense_layers,
                               max_conv_kernels, max_dense_nodes,
                               input_shape, num_classes)

# **Create and run the genetic program**
# The next, and final, step is to create a `DEvol` instance and run it.
# Here we specify a few settings pertaining to the genetic program. The
# program will save each genome's encoding, as well as the model's loss
# and accuracy, in a `.csv` file whose path is printed at the beginning
# of the program.

num_generations = 10
population_size = 10
num_epochs = 1

devol = DEvol(genome_handler, 'genomes.csv')
model = devol.run(dataset, num_generations, population_size, num_epochs)
model.summary()
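Because this run passes 'genomes.csv' to DEvol, every evaluated genome is logged there together with its loss and accuracy. The snippet below is a rough sketch of inspecting that log with pandas after the run; the exact column layout is an assumption (the metrics are written in the trailing columns), so treat it as illustrative only:

import pandas as pd

log = pd.read_csv('genomes.csv')
print(log.tail())  # the most recently evaluated genomes
# assuming the last column holds accuracy, list the strongest genomes so far
print(log.sort_values(log.columns[-1], ascending=False).head())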
Example No. 6
def main():
    sess = tf.Session()
    with sess.as_default():
        parser = argparse.ArgumentParser(description="Halite 2 ML Training")
        parser.add_argument("--model_name", help="Name of the model", default="keras-model.h5")
        parser.add_argument("--minibatch_size", help="Size of the minibatch", default=100, type=int)
        parser.add_argument("--steps", help="Number of steps", default=1000, type=int)
        parser.add_argument("--games_limit", help="Number of games", default=1000, type=int)
        parser.add_argument("--data", help="Location of Replays", default="data/sample/")
        parser.add_argument("--cache", help="Model to Load", default=None)
        parser.add_argument("--load_data", help="Load Features from file", default=True, type=bool)
        parser.add_argument("--pack", help="Which replay pack to use", default="all")
        parser.add_argument("--load_weights", help="Load weights", default=False, type=bool)
        parser.add_argument("--lr", help="Learning Rate", default=1e-3, type=float)
        parser.add_argument("--evo", help="use Genetic evolution", default=False, type=bool)
        args = parser.parse_args()
        if not args.load_data:
            if args.seed:
                np.random.seed(args.seed)
            if args.data.endswith('.zip'):
                raw_data = fetch_data_zip(args.data, args.games_limit)
            else:
                raw_data = fetch_data_dir(args.data, args.games_limit)
            data_input, data_output = parse(raw_data, None, args.dump_features_location)
        else:
            data_input, data_output = load_data(pack=args.pack)
        data_size = len(data_input)
        training_input, training_output = data_input, data_output

        training_data_size = len(training_input)
        # randomly permute the data
        permutation = np.random.permutation(training_data_size)
        training_input, training_output = training_input[permutation], training_output[permutation]

        if not args.evo:
            kmodel = KerasModel(args.model_name, args.load_weights, training=True,
                                batch_size=args.minibatch_size, lr=args.lr)
            model = kmodel.model
            model.summary()
            eval_input = kmodel.normalize_input(training_input)
            # spot-check predictions for the first 10 training samples
            for i in range(10):
                preds = kmodel.predict(training_input[i])
                print("Pred {}".format(preds))
                count = 0
                true_count = 0
                for j, v in enumerate(preds):
                    count += 1
                    as_perc = round(v, 3) * 100
                    t_as_perc = round(training_output[i][j], 3) * 100
                    if as_perc == t_as_perc:
                        true_count += 1
                    print("{0:.2f} vs {1:.2f} | {2}".format(as_perc, t_as_perc, as_perc == t_as_perc))
                print("{0}/{1} = {2:.2f}%".format(true_count, count, true_count / count * 100))

            score = model.evaluate(eval_input, training_output, verbose=1)
            print("\nInitial: loss: {0:.2f}, acc: {1:.2f}%".format(score[0], score[1] * 100))
            print("Metrics: {}".format(model.metrics_names))
            history = kmodel.fit(training_input, training_output, batch_size=args.minibatch_size, epochs=args.steps)

            current_directory = os.path.dirname(os.path.abspath(__file__))
            model_path = os.path.join(current_directory, os.path.pardir, "models/")
            kmodel.save(model_path)
            summary(history, model_path)
        else:
            max_conv_layers = 2
            max_dense_layers = 4
            max_conv_kernels = 128
            max_dense_nodes = 512
            input_shape = data_input.shape[1:]
            num_classes = 28
            genome_handler = BotGenomeHandler(max_conv_layers, max_dense_layers, max_conv_kernels, max_dense_nodes, input_shape, num_classes)
            num_generations = 20
            population_size = 30
            num_epochs = 1
            devol = DEvol(genome_handler)
            # hold out the last 20% of the shuffled data for fitness evaluation
            split = int(training_data_size * .8)

            x_train, x_test = training_input[:split], training_input[split:]
            y_train, y_test = training_output[:split], training_output[split:]

            dataset = ((x_train, y_train), (x_test, y_test))
            model, accuracy, loss = devol.run(dataset, num_generations, population_size, num_epochs)
            model.summary()
            print("Accuracy: {}\tLoss: {}".format(accuracy, loss))
Example No. 7
# **Prepare the genome configuration**
# The `GenomeHandler` class handles the constraints that are imposed upon
# models in a particular genetic program. See `genome_handler.py`
# for more information.

max_conv_layers = 6
max_dense_layers = 2  # including final softmax layer
max_conv_kernels = 256
max_dense_nodes = 1024
input_shape = x_train.shape[1:]
num_classes = 10

genome_handler = GenomeHandler(max_conv_layers, max_dense_layers,
                               max_conv_kernels, max_dense_nodes,
                               input_shape, num_classes)

# **Create and run the genetic program**
# The next, and final, step is to create a `DEvol` instance and run it.
# Here we specify a few settings pertaining to the genetic program. The
# program will save each genome's encoding, as well as the model's loss
# and accuracy, in a `.csv` file whose path is printed at the beginning
# of the program.

num_generations = 10
population_size = 10
num_epochs = 1

devol = DEvol(genome_handler)
model, accuracy = devol.run(dataset, num_generations, population_size,
                            num_epochs)
print(model.summary())
Example No. 8
# Load the MNIST data and prepare it for use by the GPU. We also do a
# one-hot encoding of the labels.

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 1, 28, 28).astype('float32') / 255
x_test = x_test.reshape(x_test.shape[0], 1, 28, 28).astype('float32') / 255
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
dataset = ((x_train, y_train), (x_test, y_test))

# **Prepare the genome configuration**
# The `GenomeHandler` class handles the constraints that are imposed upon
# models in a particular genetic program. In this example, a genome is
# allowed **up to** 6 convolutional layers, 3 dense layers, 256 feature
# maps in each convolution, and 1024 nodes in each dense layer. It also
# restricts the activation functions to ReLU and sigmoid. See
# `genome_handler.py` for more information.

genome_handler = GenomeHandler(6, 3, 256, 1024, x_train.shape[1:],
                               n_classes=10,
                               activations=["relu", "sigmoid"])

# **Create and run the genetic program**
# The next, and final, step is to create a `DEvol` instance and run it.
# Here we specify a few settings pertaining to the genetic program. In
# this example, we have 10 generations of evolution, 20 members in each
# population, and 1 epoch of training used to evaluate each model's
# fitness. The program will save each genome's encoding, as well as the
# model's loss and accuracy, in a `.csv` file whose path is printed at
# the beginning of the program.

devol = DEvol(genome_handler)
devol.run(dataset, 10, 20, 1)
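This example discards the value returned by `run`. As a minimal follow-up sketch, the call can instead capture the best evolved model and score it on the MNIST test split; indexing the result of `evaluate` assumes the model is compiled with an accuracy metric, which the logged accuracy column suggests but is an assumption here:

best_model = devol.run(dataset, 10, 20, 1)
score = best_model.evaluate(x_test, y_test, verbose=0)
print("test loss: {0:.4f}, test accuracy: {1:.4f}".format(score[0], score[1]))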
Example No. 9
def prepData(x, y, train_prop=0.8):  # the 0.8 default split is an assumption
    # shuffle the (sample, label) pairs together, then split them into
    # training and validation sets
    combined = list(zip(x, y))
    shuffle(combined)
    x = np.array([item[0] for item in combined])
    y = np.array([item[1] for item in combined])
    ind = int(train_prop * len(combined))
    return ((x[:ind], y[:ind]), (x[ind:], y[ind:]))


iris = datasets.load_iris()
x = iris.data
y = to_categorical(iris.target)

# define model constraints

max_conv_layers = 0
max_dense_layers = 3  # including final softmax layer
max_conv_kernels = 0
max_dense_nodes = 1024
input_shape = x.shape[1:]
num_classes = 3

genome_handler = GenomeHandler(max_conv_layers, max_dense_layers,
                               max_conv_kernels, max_dense_nodes,
                               input_shape, num_classes)

# create and run a DEvol

num_generations = 5
population_size = 5
num_epochs = 20

devol = DEvol(genome_handler)
devol.run(prepData(x, y), num_generations, population_size, num_epochs)
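The winning model is discarded above; a small hedged variant of that final call keeps it and sanity-checks it on the held-out split produced by prepData (only names defined in this example plus the standard Keras `predict` are used):

(train_data, (x_val, y_val)) = prepData(x, y)
model = devol.run((train_data, (x_val, y_val)), num_generations,
                  population_size, num_epochs)
print(model.summary())
print("predicted class of the first validation flower:",
      model.predict(x_val)[0].argmax())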