import queue
import time

import data_loader
import keras_util

# `model_config` and `get_model` are assumed to be provided by the surrounding
# project module; they are not shown in this snippet.


def train():
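    # Background evaluation: EvaluateTask is a daemon worker thread that
    # consumes jobs from evaluate_queue, and EvaluateCallback (registered with
    # fit_generator below) presumably enqueues an evaluation after each epoch.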
    evaluate_queue = queue.Queue()
    evaluate_task = keras_util.EvaluateTask(evaluate_queue)
    evaluate_task.setDaemon(True)
    evaluate_task.start()
    checkpoint = keras_util.EvaluateCallback(model_config, evaluate_queue)

    start = time.time()
    print("####### start train model")
    for i in range(len(model_config.epoch)):
        print("####### lr=%f, freeze layers=%2f epoch=%d" % (
            model_config.lr[i], model_config.freeze_layers[i], model_config.epoch[i]))
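        # Cyclic learning rate: oscillate between base_lr and 5x base_lr with a
        # half-cycle of step_size batches, i.e. roughly one full cycle per epoch.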
        clr = keras_util.CyclicLrCallback(base_lr=model_config.lr[i], max_lr=model_config.lr[i] * 5,
                                          step_size=model_config.get_steps_per_epoch(i) / 2)

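        # Augmenting training generator for this stage: feature-wise
        # normalisation, shifts, horizontal flips, small rotations and rescaling.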
        generator = data_loader.KerasGenerator(model_config=model_config,
                                               featurewise_center=True,
                                               featurewise_std_normalization=True,
                                               width_shift_range=0.15,
                                               height_shift_range=0.1,
                                               horizontal_flip=True,
                                               rotation_range=10,
                                               rescale=1. / 256)
        train_flow = generator.flow_from_files(model_config.train_files,
                                               mode="fit",
                                               target_size=model_config.image_size,
                                               batch_size=model_config.train_batch_size[i],
                                               shuffle=True,
                                               label_position=model_config.label_position)

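        # Stage 0 trains a freshly built model; later stages rebuild the model
        # with the new freeze/lr settings and reload the previous stage's weights.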
        if i == 0:
            model = get_model(freeze_layers=model_config.freeze_layers[i], lr=model_config.lr[i],
                              output_dim=len(model_config.label_position))
            model.fit_generator(generator=train_flow,
                                steps_per_epoch=model_config.get_steps_per_epoch(i),
                                epochs=model_config.epoch[i],
                                workers=16,
                                callbacks=[checkpoint, clr])
        else:
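            # weights=None presumably skips the default pretrained initialisation,
            # since the weights are loaded from the previous stage's checkpoint below.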
            model = get_model(freeze_layers=model_config.freeze_layers[i], output_dim=len(model_config.label_position),
                              lr=model_config.lr[i], weights=None)
            print("####### load weight file: %s" % model_config.get_weights_path(model_config.epoch[i - 1]))
            model.load_weights(model_config.get_weights_path(model_config.epoch[i - 1]))
            model.fit_generator(generator=train_flow,
                                steps_per_epoch=model_config.get_steps_per_epoch(i),
                                epochs=model_config.epoch[i],
                                initial_epoch=model_config.epoch[i - 1],
                                workers=16,
                                callbacks=[checkpoint, clr])

    print("####### train model spend %d seconds" % (time.time() - start))
    print("####### train model spend %d seconds average" % ((time.time() - start) / model_config.epoch[-1]))
Example #2

import queue
import time

import data_loader
import keras_util

def train():
    cb = []
    evaluate_queue = queue.Queue()
    evaluate_task = keras_util.EvaluateTask(evaluate_queue)
    evaluate_task.setDaemon(True)
    evaluate_task.start()
    checkpoint = keras_util.EvaluateCallback(model_config, evaluate_queue)
    cb.append(checkpoint)
    start = time.time()
    model_config.save_log("####### start train model")

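    # This variant supports resuming an interrupted run: get_init_stage() decides
    # which training stage to restart from, and model_config.initial_epoch records
    # the epoch at which training left off.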
    init_stage = model_config.get_init_stage()
    model_config.save_log("####### init stage is %d" % init_stage)

    for i in range(init_stage, len(model_config.epoch)):
        model_config.save_log(
            "####### lr=%f, freeze layers=%.2f, epoch=%d" %
            (model_config.lr[i], model_config.freeze_layers[i],
             model_config.epoch[i]))
        if model_config.clr:
            clr = keras_util.CyclicLrCallback(
                base_lr=model_config.lr[i],
                max_lr=model_config.lr[i] * 5,
                step_size=model_config.get_steps_per_epoch(i) / 2)
            # rebuild the callback list for this stage so that cyclic LR
            # callbacks from earlier stages do not accumulate
            cb = [checkpoint, clr]

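        # Unlike Example #1, the generator is built from model_config alone;
        # the augmentation settings are presumably carried by the config object.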
        train_flow = data_loader.KerasGenerator(model_config=model_config) \
            .flow_from_files(model_config.train_files,
                             mode="fit",
                             target_size=model_config.image_size,
                             batch_size=model_config.train_batch_size[i],
                             shuffle=True,
                             label_position=model_config.label_position)
        if i == 0:
            model_config.save_log(
                "####### initial epoch is 0, end epoch is %d" %
                model_config.epoch[i])
            model = get_model(freeze_layers=model_config.freeze_layers[i],
                              lr=model_config.lr[i],
                              output_dim=len(model_config.label_position))
            model.fit_generator(
                generator=train_flow,
                steps_per_epoch=model_config.get_steps_per_epoch(i),
                epochs=model_config.epoch[i],
                workers=16,
                verbose=1,
                callbacks=cb)
        else:
            model = get_model(freeze_layers=model_config.freeze_layers[i],
                              output_dim=len(model_config.label_position),
                              lr=model_config.lr[i],
                              weights=None)

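            # When resuming mid-stage, warm-start from the checkpoint saved at
            # initial_epoch; otherwise continue from the end of the previous stage.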
            if i == init_stage:
                model_config.save_log(
                    "####### load weight file: %s" %
                    model_config.get_weights_path(model_config.initial_epoch))
                model.load_weights(
                    model_config.get_weights_path(model_config.initial_epoch))

                model_config.save_log(
                    "####### initial epoch is %d, end epoch is %d" %
                    (model_config.initial_epoch, model_config.epoch[i]))
                model.fit_generator(
                    generator=train_flow,
                    steps_per_epoch=model_config.get_steps_per_epoch(i),
                    epochs=model_config.epoch[i],
                    initial_epoch=model_config.initial_epoch,
                    workers=16,
                    verbose=1,
                    callbacks=cb)
            else:
                model_config.save_log(
                    "####### load weight file: %s" %
                    model_config.get_weights_path(model_config.epoch[i - 1]))
                model.load_weights(
                    model_config.get_weights_path(model_config.epoch[i - 1]))

                model_config.save_log(
                    "####### initial epoch is %d, end epoch is %d" %
                    (model_config.epoch[i - 1], model_config.epoch[i]))
                model.fit_generator(
                    generator=train_flow,
                    steps_per_epoch=model_config.get_steps_per_epoch(i),
                    epochs=model_config.epoch[i],
                    initial_epoch=model_config.epoch[i - 1],
                    workers=16,
                    verbose=1,
                    callbacks=cb)

    model_config.save_log("####### train model spend %d seconds" %
                          (time.time() - start))
    model_config.save_log("####### train model spend %d seconds average" %
                          ((time.time() - start) / model_config.epoch[-1]))

    # Wait for the last prediction to finish: the evaluation worker is a daemon
    # thread, so it would be killed as soon as the main program exits.
    time.sleep(60)