segmenter.model,
                       save_best_only=True,
                       save_weights_only=True,
                       monitor='val_loss',
                       verbose=0)
    log = TensorBoard(log_dir='logs',
                      histogram_freq=0,
                      batch_size=data_loader.batch_size,
                      write_graph=True,
                      write_grads=False)

    # Use LRFinder to find effective learning rate
    lr_finder = LRFinder(1e-6, 1e-2, steps_per_epoch, epochs=1)  # => (1e-4, 1e-3)
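    # note: lr_finder is not passed to the callbacks below; the range it reported
    # (see the comment above) is reused as min_lr/max_lr for the SGDRScheduler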
    lr_scheduler = SGDRScheduler(min_lr=1e-4, max_lr=1e-3,
                                 initial_epoch=initial_epoch,
                                 steps_per_epoch=steps_per_epoch,
                                 cycle_length=10,
                                 lr_decay=0.9,
                                 mult_factor=1.2)

    X_train, Y_train, X_valid, Y_valid = DataLoader.load_data(h5_dataset_path, frac=0.9)

    segmenter.parallel_model.fit_generator(data_loader.generator_from_data(X_train, Y_train),
                                           epochs=epochs,
                                           steps_per_epoch=steps_per_epoch,
                                           validation_data=data_loader.generator_from_data(X_valid, Y_valid),
                                           validation_steps=validation_steps,
                                           callbacks=[ck, log, lr_scheduler],
                                           initial_epoch=initial_epoch)

    # lr_finder.plot_loss()
    # plt.savefig("loss.png")
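
Both snippets drive the learning rate with SGDRScheduler, i.e. cosine annealing with warm restarts. The sketch below is a minimal, self-contained illustration of the schedule those arguments imply, assuming min_lr/max_lr bound each cycle, lr_decay scales the peak after every restart, and mult_factor stretches each new cycle; it is a sketch under those assumptions, not the library's implementation.

import math

def sgdr_lr(step, steps_per_epoch, min_lr=1e-4, max_lr=1e-3,
            cycle_length=10, lr_decay=0.9, mult_factor=1.2):
    # Position in training, measured in epochs.
    epoch = step / steps_per_epoch
    # Walk forward through the cycles until the one containing `epoch`.
    start, length, peak = 0.0, float(cycle_length), max_lr
    while epoch >= start + length:
        start += length
        length *= mult_factor   # each new cycle is longer
        peak *= lr_decay        # and restarts from a lower peak
    frac = (epoch - start) / length
    # Cosine-anneal from `peak` down to `min_lr` within the current cycle.
    return min_lr + 0.5 * (peak - min_lr) * (1.0 + math.cos(math.pi * frac))

# e.g. sgdr_lr(0, 100) -> 1e-3 (cycle start), sgdr_lr(999, 100) -> ~1e-4 (end of
# the first 10-epoch cycle), sgdr_lr(1000, 100) -> 9e-4 (restart, peak * lr_decay)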
Example #2
    log = TensorBoard(log_dir='logs',
                      histogram_freq=0,
                      batch_size=data_loader.batch_size,
                      write_graph=True,
                      write_grads=False)

    # Use LRFinder to find effective learning rate
    lr_finder = LRFinder(1e-6, 1e-2, steps_per_epoch,
                         epochs=1)  # => (2e-4, 3e-4)
    lr_scheduler = WatchScheduler(lambda _, lr: lr / 2,
                                  min_lr=2e-4,
                                  max_lr=4e-4,
                                  watch="val_loss",
                                  watch_his_len=2)
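    # note: the WatchScheduler above is immediately overwritten by the
    # SGDRScheduler assigned next, so it is never actually used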
    lr_scheduler = SGDRScheduler(min_lr=4e-5,
                                 max_lr=1e-3,
                                 steps_per_epoch=steps_per_epoch,
                                 cycle_length=15,
                                 lr_decay=0.9,
                                 mult_factor=1.2)

    X_train, Y_train, X_valid, Y_valid = DataLoader.load_data(h5_dataset_path,
                                                              frac=0.8)

    print(X_train)
    print(Y_train)

    file_path = "D:\\copus\\test_icwb2\\loglog"
    loglog = open(file_path, 'w')  # 打开文件

    tokenizer.model.fit_generator(
        data_loader.generator_from_data(X_train, Y_train, loglog),
        epochs=1,