Example #1

import tensorflow as tf
from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import ModelCheckpoint

from dlgo.data.parallel_processor import GoDataProcessor
from dlgo.encoders.oneplane import OnePlaneEncoder
from dlgo.networks import small


def main():
    go_board_rows, go_board_cols = 19, 19
    num_classes = go_board_rows * go_board_cols
    num_games = 100

    encoder = OnePlaneEncoder((go_board_rows, go_board_cols))  # <1>

    processor = GoDataProcessor(encoder=encoder.name())  # <2>

    generator = processor.load_go_data('train', num_games, use_generator=True)  # <3>
    test_generator = processor.load_go_data('test', num_games, use_generator=True)

    # <1> First we create an encoder for the given board size.
    # <2> Then we initialize a Go Data processor with it.
    # <3> From the processor we create two data generators, for training and testing.

    input_shape = (encoder.num_planes, go_board_rows, go_board_cols)
    network_layers = small.layers(input_shape)
    model = Sequential()
    for layer in network_layers:
        model.add(layer)
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
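    # The Dense softmax head outputs one probability per board point
    # (19 * 19 = 361 classes), matching the categorical_crossentropy loss.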

    epochs = 5
    batch_size = 128
    model.fit_generator(generator=generator.generate(batch_size, num_classes),  # <1>
                        epochs=epochs,
                        steps_per_epoch=generator.get_num_samples() // batch_size,  # <2>
                        validation_data=test_generator.generate(batch_size, num_classes),  # <3>
                        validation_steps=test_generator.get_num_samples() // batch_size,  # <4>
                        callbacks=[ModelCheckpoint('./checkpoints/small_model_epoch_{epoch}.h5')])  # <5>

    try:
        with tf.device('/device:GPU:0'):
            model.evaluate_generator(generator=test_generator.generate(batch_size, num_classes),
                                     steps=test_generator.get_num_samples() // batch_size)  # <6>
    except RuntimeError as e:
        print(e)

    # <1> We specify a training data generator for our batch size ...
    # <2> ... and how many training steps per epoch we execute.
    # <3> An additional generator is used for validation ...
    # <4> ... for which we need to specify a number of steps.
    # <5> After each epoch we persist a checkpoint of the model.
    # <6> For evaluation we also specify a generator and the number of steps.


if __name__ == '__main__':
    main()
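
A note on the contract behind these calls: fit_generator expects an object
that yields (features, labels) batches indefinitely, which is what
generator.generate(batch_size, num_classes) is assumed to provide. A minimal
sketch of the same contract over in-memory arrays (batch_generator, features
and labels are illustrative names here, not dlgo API):

from keras.utils import to_categorical

def batch_generator(features, labels, batch_size, num_classes):
    # Cycle over the data forever; Keras draws steps_per_epoch batches per epoch.
    num_samples = features.shape[0]
    while True:
        for start in range(0, num_samples, batch_size):
            x = features[start:start + batch_size]
            y = to_categorical(labels[start:start + batch_size], num_classes)
            yield x, y
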
Example #2

from keras.models import Sequential
from keras.layers import Dense

from dlgo.data.parallel_processor import GoDataProcessor
from dlgo.encoders.oneplane import OnePlaneEncoder
from dlgo.networks import small

if __name__ == '__main__':
    go_board_rows, go_board_cols = 19, 19
    num_classes = go_board_rows * go_board_cols
    num_games = 100

    encoder = OnePlaneEncoder((go_board_rows, go_board_cols))
    processor = GoDataProcessor(encoder=encoder.name())

    generator = processor.load_go_data('train', num_games, use_generator=True)
    test_generator = processor.load_go_data('test',
                                            num_games,
                                            use_generator=True)

    input_shape = (encoder.num_planes, go_board_rows, go_board_cols)
    network_layers = small.layers(input_shape)

    model = Sequential()
    for layer in network_layers:
        model.add(layer)
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='adagrad',
                  metrics=['accuracy'])
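    # Adagrad adapts the learning rate per parameter from accumulated gradient
    # history, so it needs less hand-tuning than the plain SGD in Example #1.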

    epochs = 20
    batch_size = 128

    model.fit_generator(
        generator=generator.generate(batch_size, num_classes),
        epochs=epochs,
        # the listing is truncated here; the remaining arguments are assumed
        # to follow the same pattern as Example #1
        steps_per_epoch=generator.get_num_samples() // batch_size,
        validation_data=test_generator.generate(batch_size, num_classes),
        validation_steps=test_generator.get_num_samples() // batch_size)
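
One detail both examples share: Keras expects steps_per_epoch and
validation_steps to be whole numbers of batches, but
get_num_samples() / batch_size is a float in Python 3. A quick sketch of the
two common roundings (the sample count here is hypothetical):

import math

num_samples = 12300   # hypothetical value from get_num_samples()
batch_size = 128

print(num_samples // batch_size)            # 96: floor drops the final partial batch
print(math.ceil(num_samples / batch_size))  # 97: ceil keeps the final partial batch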

Example #3

from datetime import datetime

from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import ModelCheckpoint

from dlgo.data.parallel_processor import GoDataProcessor
from dlgo.encoders.simple import SimpleEncoder
from dlgo.networks import small


def mainmodel():
    mainmodel_start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Inside mainmodel(): ' + mainmodel_start_time)

    go_board_rows, go_board_cols = 19, 19
    num_classes = go_board_rows * go_board_cols
    num_games = 100

    # encoder = OnePlaneEncoder((go_board_rows, go_board_cols))       # 1 plane
    # encoder = SevenPlaneEncoder((go_board_rows, go_board_cols))   # 7 planes
    encoder = SimpleEncoder((go_board_rows, go_board_cols))  # 11 planes

    processor = GoDataProcessor(
        encoder=encoder.name(),
        data_directory='D:\\CODE\\Python\\Go\\code\\dlgo\\data\\tarfiles')

    # use_generator=True is required here: the code below calls generate()
    # and get_num_samples() on the returned objects
    generator = processor.load_go_data('train', num_games,
                                       use_generator=True)
    test_generator = processor.load_go_data('test', num_games,
                                            use_generator=True)

    input_shape = (encoder.num_planes, go_board_rows, go_board_cols)
    network_layers = small.layers(input_shape)
    model = Sequential()
    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Inside mainmodel(): before adding all layers: ' + timestamp)

    for layer in network_layers:
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print('Before model.add(' + layer.name + '): ' + timestamp)
        model.add(layer)
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print('After model.add(' + layer.name + '): ' + timestamp)

    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Inside mainmodel(): after adding all layers: ' + timestamp)

    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])

    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('Inside mainmodel(): after compiling the model: ' + timestamp)

    # For more information:
    # https://keras.io/callbacks/
    epochs = 2
    batch_size = 128
    # fit_generator is deprecated in TF 2.x; model.fit accepts the generator
    # directly as its first argument (use_multiprocessing is dropped here:
    # plain Python generators are not safe to share across processes)
    model.fit(
        generator.generate(batch_size, num_classes),
        epochs=epochs,
        steps_per_epoch=generator.get_num_samples() // batch_size,
        validation_data=test_generator.generate(batch_size, num_classes),
        validation_steps=test_generator.get_num_samples() // batch_size,
        callbacks=[
            ModelCheckpoint(
                filepath='D:\\CODE\\Python\\Go\\code\\dlgo\\data\\checkpoints\\'
                         'small_model_epoch_{epoch:02d}-{val_accuracy:.4f}.h5',
                monitor='accuracy'),
            # EarlyStopping(monitor='accuracy'),
            # ProgbarLogger(),
            # CSVLogger('D:\\CODE\\Python\\Go\\code\\dlgo\\data\\logs\\training.log', append=True),
            # TensorBoard(log_dir='data\\logs', batch_size=128, write_images=True)
        ])
    # evaluate_generator is likewise deprecated in TF 2.x; model.evaluate
    # accepts the generator directly
    model.evaluate(
        test_generator.generate(batch_size, num_classes),
        steps=test_generator.get_num_samples() // batch_size)


if __name__ == '__main__':
    mainmodel()
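
The ModelCheckpoint filepath above relies on Keras's filename templating: the
named fields are filled via str.format from the epoch number and the logged
metrics (val_accuracy is only available because validation_data is supplied).
An illustration with hypothetical values:

template = 'small_model_epoch_{epoch:02d}-{val_accuracy:.4f}.h5'
print(template.format(epoch=3, val_accuracy=0.0214))
# small_model_epoch_03-0.0214.h5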