Example #1
import tensorflow as tf
from keras.models import Sequential
from keras.layers.core import Dense
from keras.callbacks import ModelCheckpoint

from dlgo.data.parallel_processor import GoDataProcessor
from dlgo.encoders.oneplane import OnePlaneEncoder
from dlgo.networks import small


def main():
    go_board_rows, go_board_cols = 19, 19
    num_classes = go_board_rows * go_board_cols
    num_games = 100

    encoder = OnePlaneEncoder((go_board_rows, go_board_cols))  # <1>

    processor = GoDataProcessor(encoder=encoder.name())  # <2>

    generator = processor.load_go_data('train', num_games, use_generator=True)  # <3>
    test_generator = processor.load_go_data('test', num_games, use_generator=True)

    # <1> First we create an encoder matching the board size.
    # <2> Then we initialize a Go data processor with it.
    # <3> From the processor we create two data generators, one for training and one for testing
    #     (the shape check just below illustrates what these generators yield).
    # end::train_generator_generator[]
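
    # Quick sanity check on the generator output: this assumes generate() yields
    # (features, labels) batches, as the fit_generator call below implies.
    # Pulling a single batch shows the tensor shapes the network will be trained on.
    sample_features, sample_labels = next(generator.generate(128, num_classes))
    print('features batch shape:', sample_features.shape)  # e.g. (128, encoder.num_planes, 19, 19)
    print('labels batch shape:', sample_labels.shape)      # e.g. (128, num_classes)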

    # tag::train_generator_model[]
    input_shape = (encoder.num_planes, go_board_rows, go_board_cols)
    network_layers = small.layers(input_shape)
    model = Sequential()
    for layer in network_layers:
        model.add(layer)
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
    # end::train_generator_model[]

    # tag::train_generator_fit[]
    epochs = 5
    batch_size = 128
    model.fit_generator(generator=generator.generate(batch_size, num_classes),  # <1>
                        epochs=epochs,
                        steps_per_epoch=generator.get_num_samples() // batch_size,  # <2>
                        validation_data=test_generator.generate(batch_size, num_classes),  # <3>
                        validation_steps=test_generator.get_num_samples() // batch_size,  # <4>
                        callbacks=[ModelCheckpoint('./checkpoints/small_model_epoch_{epoch}.h5')])  # <5>

    try:
        with tf.device('/device:GPU:0'):  # evaluate on the GPU if one is available
            model.evaluate_generator(
                generator=test_generator.generate(batch_size, num_classes),
                steps=test_generator.get_num_samples() // batch_size)  # <6>
    except RuntimeError as e:
        print(e)


if __name__ == '__main__':
    main()
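
The ModelCheckpoint callback writes one HDF5 file per epoch, so the trained network can be restored later without retraining. Below is a minimal sketch of reloading a checkpoint and asking it for move probabilities on an empty board; the checkpoint filename, and the use of dlgo.goboard.GameState to construct a game state, are assumptions rather than part of the listing above.

import numpy as np
from keras.models import load_model
from dlgo import goboard
from dlgo.encoders.oneplane import OnePlaneEncoder

model = load_model('./checkpoints/small_model_epoch_5.h5')  # hypothetical checkpoint name from the run above

encoder = OnePlaneEncoder((19, 19))
game = goboard.GameState.new_game(19)        # empty 19x19 board
board_tensor = encoder.encode(game)          # shape: (num_planes, 19, 19)
X = np.expand_dims(board_tensor, axis=0)     # add a batch dimension
move_probs = model.predict(X)[0]             # one probability per board point
print('most likely move index:', int(np.argmax(move_probs)))
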
Example #2
from datetime import datetime

from dlgo.data.parallel_processor import GoDataProcessor
from dlgo.encoders.oneplane import OnePlaneEncoder
from dlgo.encoders.sevenplane import SevenPlaneEncoder
from dlgo.encoders.simple import SimpleEncoder
from dlgo.networks import small, medium, large
from keras.models import Sequential
from keras.layers.core import Dense
from keras.callbacks import ModelCheckpoint
from keras.optimizers import Adagrad

if __name__ == '__main__':
    go_board_rows, go_board_cols = 19, 19
    num_classes = go_board_rows * go_board_cols
    num_games = 100

    encoder = OnePlaneEncoder((go_board_rows, go_board_cols))
    processor = GoDataProcessor(encoder=encoder.name())

    generator = processor.load_go_data('train', num_games, use_generator=True)
    test_generator = processor.load_go_data('test',
                                            num_games,
                                            use_generator=True)

    input_shape = (encoder.num_planes, go_board_rows, go_board_cols)
    network_layers = small.layers(input_shape)

    model = Sequential()
    for layer in network_layers:
        model.add(layer)
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='adagrad',
                  metrics=['accuracy'])
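
    # The snippet above stops after compile(); a minimal sketch of the training
    # and evaluation steps, mirroring Example #1, would look like this:
    epochs = 5
    batch_size = 128
    model.fit_generator(
        generator=generator.generate(batch_size, num_classes),
        epochs=epochs,
        steps_per_epoch=generator.get_num_samples() // batch_size,
        validation_data=test_generator.generate(batch_size, num_classes),
        validation_steps=test_generator.get_num_samples() // batch_size,
        callbacks=[ModelCheckpoint('./checkpoints/small_model_epoch_{epoch}.h5')])
    model.evaluate_generator(
        generator=test_generator.generate(batch_size, num_classes),
        steps=test_generator.get_num_samples() // batch_size)
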
def model03():
    mainmodel_start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Start model03(): ' + mainmodel_start_time)

    optimizer = Adagrad()
    # optimizer = Adadelta()
    # optimizer = SGD(lr=0.01, momentum=0.9, decay=0.001)

    go_board_rows, go_board_cols = 19, 19
    num_classes = go_board_rows * go_board_cols
    num_games = 100

    one_plane_encoder = OnePlaneEncoder((go_board_rows, go_board_cols))
    seven_plane_encoder = SevenPlaneEncoder((go_board_rows, go_board_cols))
    simple_encoder = SimpleEncoder((go_board_rows, go_board_cols))

    encoder = seven_plane_encoder

    processor = GoDataProcessor(encoder=encoder.name())

    train_generator = processor.load_go_data('train',
                                             num_games,
                                             use_generator=True)
    test_generator = processor.load_go_data('test',
                                            num_games,
                                            use_generator=True)

    input_shape = (encoder.num_planes, go_board_rows, go_board_cols)

    network_large = large
    network_small = small

    network = network_small
    network_layers = network.layers(input_shape)
    model = Sequential()
    for layer in network_layers:
        model.add(layer)

    model.add(Dense(num_classes, activation='softmax'))  # softmax so the output is a probability distribution over moves

    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    epochs = 5
    batch_size = 128
    model.fit_generator(
        generator=train_generator.generate(batch_size, num_classes),
        epochs=epochs,
        steps_per_epoch=train_generator.get_num_samples() // batch_size,
        validation_data=test_generator.generate(batch_size, num_classes),
        validation_steps=test_generator.get_num_samples() // batch_size,
        callbacks=[
            ModelCheckpoint(
                filepath='D:\\CODE\\Python\\Go\\code\\dlgo\\data\\checkpoints\\'
                         'small_epoch_{epoch:02d}-acc-{accuracy:.4f}'
                         '-val_acc_{val_accuracy:.4f}.h5',
                monitor='accuracy')
        ])
    score = model.evaluate_generator(
        generator=test_generator.generate(batch_size, num_classes),
        steps=test_generator.get_num_samples() // batch_size)
    print('Test loss and accuracy: ' + str(score))
def mainmodel():
    mainmodel_start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Inside mainmodel(): ' + mainmodel_start_time)

    go_board_rows, go_board_cols = 19, 19
    num_classes = go_board_rows * go_board_cols
    num_games = 100

    encoder = OnePlaneEncoder((go_board_rows, go_board_cols))  # 1 plane
    # encoder = SevenPlaneEncoder((go_board_rows, go_board_cols))   # 7 planes
    # encoder = SimpleEncoder((go_board_rows, go_board_cols))       # 11 planes

    processor = GoDataProcessor(encoder=encoder.name())

    # use_generator=True is required here: the fit_generator and
    # evaluate_generator calls below expect data generators, not in-memory arrays.
    generator = processor.load_go_data('train', num_games, use_generator=True)
    test_generator = processor.load_go_data('test',
                                            num_games,
                                            use_generator=True)

    input_shape = (encoder.num_planes, go_board_rows, go_board_cols)

    # network_layers = small.layers(input_shape)
    network_layers = medium.layers(input_shape)

    model = Sequential()
    mainmodel_start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Inside mainmodel(): before adding all layers: ' +
          mainmodel_start_time)

    for layer in network_layers:
        mainmodel_start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print('Before model.add(' + layer.name + '): ' + mainmodel_start_time)
        model.add(layer)
        mainmodel_start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print('After model.add(' + layer.name + '): ' + mainmodel_start_time)

    mainmodel_start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Inside mainmodel(): after adding all layers: ' +
          mainmodel_start_time)

    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])

    mainmodel_start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Inside mainmodel(): after compiling the model: ' +
          mainmodel_start_time)

    # For more information:
    # https://keras.io/callbacks/
    epochs = 2
    batch_size = 128
    model.fit_generator(
        generator=generator.generate(batch_size, num_classes),
        epochs=epochs,
        steps_per_epoch=generator.get_num_samples() // batch_size,
        validation_data=test_generator.generate(batch_size, num_classes),
        validation_steps=test_generator.get_num_samples() // batch_size,
        callbacks=[
            ModelCheckpoint(
                'data\\checkpoints\\small_model_epoch_{epoch:02d}-{val_loss:.2f}.h5'
            ),
            # EarlyStopping(monitor='accuracy'),
            # ProgbarLogger(),
            # CSVLogger('data\\logs\\training.log'),
            # TensorBoard(log_dir='data\\logs', batch_size=128, write_images=True)
        ])
    score = model.evaluate_generator(
        generator=test_generator.generate(batch_size, num_classes),
        steps=test_generator.get_num_samples() // batch_size)
    print('Test loss and accuracy: ' + str(score))
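
Neither model03() nor mainmodel() is called anywhere in the snippets above. A minimal, assumed entry point that runs one of them and reports the total wall-clock time might look like this:

if __name__ == '__main__':
    run_start = datetime.now()
    mainmodel()  # or model03(), depending on the experiment
    print('Total run time: ' + str(datetime.now() - run_start))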